diff --git a/Gopkg.lock b/Gopkg.lock index e9b4cfe2e..52cb9853a 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -25,6 +25,24 @@ revision = "5741799b275a3c4a5a9623a993576d7545cf7b5c" version = "v2.4.0" +[[projects]] + name = "github.com/emicklei/go-restful-swagger12" + packages = ["."] + revision = "dcef7f55730566d41eae5db10e7d6981829720f6" + version = "1.0.1" + +[[projects]] + name = "github.com/fsnotify/fsnotify" + packages = ["."] + revision = "629574ca2a5df945712d3079857300b5e4da0236" + version = "v1.4.2" + +[[projects]] + name = "github.com/ghodss/yaml" + packages = ["."] + revision = "0ca9ea5df5451ffdf184b4428c902747c2c11cd7" + version = "v1.0.0" + [[projects]] branch = "master" name = "github.com/go-openapi/jsonpointer" @@ -61,34 +79,184 @@ packages = ["."] revision = "23def4e6c14b4da8ac2ed8007337bc5eb5007998" +[[projects]] + branch = "master" + name = "github.com/golang/protobuf" + packages = ["proto","ptypes","ptypes/any","ptypes/duration","ptypes/timestamp"] + revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845" + +[[projects]] + branch = "master" + name = "github.com/google/btree" + packages = ["."] + revision = "316fb6d3f031ae8f4d457c6c5186b9e3ded70435" + [[projects]] branch = "master" name = "github.com/google/gofuzz" packages = ["."] revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1" +[[projects]] + name = "github.com/googleapis/gnostic" + packages = ["OpenAPIv2","compiler","extensions"] + revision = "ee43cbb60db7bd22502942cccbc39059117352ab" + version = "v0.1.0" + +[[projects]] + branch = "master" + name = "github.com/gregjones/httpcache" + packages = [".","diskcache"] + revision = "2bcd89a1743fd4b373f7370ce8ddc14dfbd18229" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/hcl" + packages = [".","hcl/ast","hcl/parser","hcl/scanner","hcl/strconv","hcl/token","json/parser","json/scanner","json/token"] + revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8" + +[[projects]] + branch = "master" + name = "github.com/howeyc/gopass" + packages = ["."] + revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8" + +[[projects]] + name = "github.com/imdario/mergo" + packages = ["."] + revision = "7fe0c75c13abdee74b09fcacef5ea1c6bba6a874" + version = "0.2.4" + +[[projects]] + name = "github.com/inconshreveable/mousetrap" + packages = ["."] + revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" + version = "v1.0" + +[[projects]] + name = "github.com/json-iterator/go" + packages = ["."] + revision = "f7279a603edee96fe7764d3de9c6ff8cf9970994" + version = "1.0.4" + +[[projects]] + branch = "master" + name = "github.com/juju/ratelimit" + packages = ["."] + revision = "59fac5042749a5afb9af70e813da1dd5474f0167" + +[[projects]] + name = "github.com/magiconair/properties" + packages = ["."] + revision = "be5ece7dd465ab0765a9682137865547526d1dfb" + version = "v1.7.3" + [[projects]] branch = "master" name = "github.com/mailru/easyjson" packages = ["buffer","jlexer","jwriter"] revision = "5f62e4f3afa2f576dc86531b7df4d966b19ef8f8" +[[projects]] + branch = "master" + name = "github.com/mitchellh/go-homedir" + packages = ["."] + revision = "b8bc1bf767474819792c23f32d8286a45736f1c6" + +[[projects]] + branch = "master" + name = "github.com/mitchellh/mapstructure" + packages = ["."] + revision = "06020f85339e21b2478f756a78e295255ffa4d6a" + +[[projects]] + name = "github.com/onsi/ginkgo" + packages = 
[".","config","internal/codelocation","internal/containernode","internal/failer","internal/leafnodes","internal/remote","internal/spec","internal/spec_iterator","internal/specrunner","internal/suite","internal/testingtproxy","internal/writer","reporters","reporters/stenographer","reporters/stenographer/support/go-colorable","reporters/stenographer/support/go-isatty","types"] + revision = "9eda700730cba42af70d53180f9dcce9266bc2bc" + version = "v1.4.0" + +[[projects]] + name = "github.com/onsi/gomega" + packages = [".","format","gbytes","gexec","internal/assertion","internal/asyncassertion","internal/oraclematcher","internal/testingtsupport","matchers","matchers/support/goraph/bipartitegraph","matchers/support/goraph/edge","matchers/support/goraph/node","matchers/support/goraph/util","types"] + revision = "c893efa28eb45626cdaa76c9f653b62488858837" + version = "v1.2.0" + +[[projects]] + name = "github.com/pelletier/go-toml" + packages = ["."] + revision = "16398bac157da96aa88f98a2df640c7f32af1da2" + version = "v1.0.1" + +[[projects]] + branch = "master" + name = "github.com/petar/GoLLRB" + packages = ["llrb"] + revision = "53be0d36a84c2a886ca057d34b6aa4468df9ccb4" + +[[projects]] + name = "github.com/peterbourgon/diskv" + packages = ["."] + revision = "5f041e8faa004a95c88a202771f4cc3e991971e6" + version = "v2.0.1" + +[[projects]] + name = "github.com/spf13/afero" + packages = [".","mem"] + revision = "8d919cbe7e2627e417f3e45c3c0e489a5b7e2536" + version = "v1.0.0" + +[[projects]] + name = "github.com/spf13/cast" + packages = ["."] + revision = "acbeb36b902d72a7a4c18e8f3241075e7ab763e4" + version = "v1.1.0" + +[[projects]] + name = "github.com/spf13/cobra" + packages = ["."] + revision = "7b2c5ac9fc04fc5efafb60700713d4fa609b777b" + version = "v0.0.1" + +[[projects]] + branch = "master" + name = "github.com/spf13/jwalterweatherman" + packages = ["."] + revision = "12bd96e66386c1960ab0f74ced1362f66f552f7b" + [[projects]] name = "github.com/spf13/pflag" packages = ["."] revision = "e57e3eeb33f795204c1ca35f56c44f83227c6e66" version = "v1.0.0" +[[projects]] + name = "github.com/spf13/viper" + packages = ["."] + revision = "25b30aa063fc18e48662b86996252eabdcf2f0c7" + version = "v1.0.0" + +[[projects]] + branch = "master" + name = "golang.org/x/crypto" + packages = ["ssh/terminal"] + revision = "94eea52f7b742c7cbe0b03b22f0c4c8631ece122" + [[projects]] branch = "master" name = "golang.org/x/net" - packages = ["http2","http2/hpack","idna","lex/httplex"] + packages = ["html","html/atom","html/charset","http2","http2/hpack","idna","lex/httplex"] revision = "a337091b0525af65de94df2eb7e98bd9962dcbe2" +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = ["unix","windows"] + revision = "1006bb3484c92b19a5b6612452e038b554fadb9c" + [[projects]] branch = "master" name = "golang.org/x/text" - packages = ["collate","collate/build","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","language","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable","width"] + packages = 
["collate","collate/build","encoding","encoding/charmap","encoding/htmlindex","encoding/internal","encoding/internal/identifier","encoding/japanese","encoding/korean","encoding/simplifiedchinese","encoding/traditionalchinese","encoding/unicode","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","internal/utf8internal","language","runes","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable","width"] revision = "88f656faf3f37f690df1a32515b479415e1a6769" [[projects]] @@ -103,12 +271,24 @@ packages = ["."] revision = "eb3733d160e74a9c7e442f435eb3bea458e1d19f" +[[projects]] + branch = "master" + name = "k8s.io/api" + packages = ["admissionregistration/v1alpha1","apps/v1beta1","apps/v1beta2","authentication/v1","authentication/v1beta1","authorization/v1","authorization/v1beta1","autoscaling/v1","autoscaling/v2beta1","batch/v1","batch/v1beta1","batch/v2alpha1","certificates/v1beta1","core/v1","extensions/v1beta1","networking/v1","policy/v1beta1","rbac/v1","rbac/v1alpha1","rbac/v1beta1","scheduling/v1alpha1","settings/v1alpha1","storage/v1","storage/v1beta1"] + revision = "218912509d74a117d05a718bb926d0948e531c20" + [[projects]] branch = "master" name = "k8s.io/apimachinery" - packages = ["pkg/api/equality","pkg/api/meta","pkg/api/resource","pkg/apis/meta/v1","pkg/apis/meta/v1/unstructured","pkg/apis/meta/v1alpha1","pkg/conversion","pkg/conversion/queryparams","pkg/conversion/unstructured","pkg/fields","pkg/labels","pkg/runtime","pkg/runtime/schema","pkg/selection","pkg/types","pkg/util/diff","pkg/util/errors","pkg/util/intstr","pkg/util/json","pkg/util/net","pkg/util/runtime","pkg/util/sets","pkg/util/validation","pkg/util/validation/field","pkg/util/wait","pkg/watch","third_party/forked/golang/reflect"] + packages = ["pkg/api/equality","pkg/api/errors","pkg/api/meta","pkg/api/resource","pkg/apis/meta/v1","pkg/apis/meta/v1/unstructured","pkg/apis/meta/v1alpha1","pkg/conversion","pkg/conversion/queryparams","pkg/conversion/unstructured","pkg/fields","pkg/labels","pkg/runtime","pkg/runtime/schema","pkg/runtime/serializer","pkg/runtime/serializer/json","pkg/runtime/serializer/protobuf","pkg/runtime/serializer/recognizer","pkg/runtime/serializer/streaming","pkg/runtime/serializer/versioning","pkg/selection","pkg/types","pkg/util/clock","pkg/util/diff","pkg/util/errors","pkg/util/framer","pkg/util/intstr","pkg/util/json","pkg/util/net","pkg/util/runtime","pkg/util/sets","pkg/util/validation","pkg/util/validation/field","pkg/util/wait","pkg/util/yaml","pkg/version","pkg/watch","third_party/forked/golang/reflect"] revision = "18a564baac720819100827c16fdebcadb05b2d0d" +[[projects]] + name = "k8s.io/client-go" + packages = 
["discovery","kubernetes","kubernetes/scheme","kubernetes/typed/admissionregistration/v1alpha1","kubernetes/typed/apps/v1beta1","kubernetes/typed/apps/v1beta2","kubernetes/typed/authentication/v1","kubernetes/typed/authentication/v1beta1","kubernetes/typed/authorization/v1","kubernetes/typed/authorization/v1beta1","kubernetes/typed/autoscaling/v1","kubernetes/typed/autoscaling/v2beta1","kubernetes/typed/batch/v1","kubernetes/typed/batch/v1beta1","kubernetes/typed/batch/v2alpha1","kubernetes/typed/certificates/v1beta1","kubernetes/typed/core/v1","kubernetes/typed/extensions/v1beta1","kubernetes/typed/networking/v1","kubernetes/typed/policy/v1beta1","kubernetes/typed/rbac/v1","kubernetes/typed/rbac/v1alpha1","kubernetes/typed/rbac/v1beta1","kubernetes/typed/scheduling/v1alpha1","kubernetes/typed/settings/v1alpha1","kubernetes/typed/storage/v1","kubernetes/typed/storage/v1beta1","pkg/version","rest","rest/watch","tools/auth","tools/clientcmd","tools/clientcmd/api","tools/clientcmd/api/latest","tools/clientcmd/api/v1","tools/metrics","tools/reference","transport","util/cert","util/flowcontrol","util/homedir","util/integer"] + revision = "2ae454230481a7cb5544325e12ad7658ecccd19b" + version = "v5.0.1" + [[projects]] branch = "master" name = "k8s.io/kube-openapi" @@ -118,6 +298,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "132e4fbab5990bffdbf7ad26a6bd17d75e27af4c1bd689b9c22baddf130cbd14" + inputs-digest = "f97c722e792f642563d42011d9e2dff8cd05844996c03c124ceca6409cda38ce" solver-name = "gps-cdcl" solver-version = 1 diff --git a/bin/pre-commit.sh b/bin/pre-commit.sh index c81e9ca08..c88607e48 100755 --- a/bin/pre-commit.sh +++ b/bin/pre-commit.sh @@ -19,7 +19,13 @@ go vet -all ./... rc=$((rc || $?)) echo "Running go test" -go test -v ./... +go list ./... | grep -vF pkg/framework/test | xargs go test -v +rc=$((rc || $?)) + +echo "Running test framework tests" +go get github.com/onsi/ginkgo/ginkgo \ + && ./pkg/framework/test/scripts/download-binaries.sh \ + && ./pkg/framework/test/scripts/run-tests.sh rc=$((rc || $?)) exit $rc diff --git a/pkg/framework/test/.gitignore b/pkg/framework/test/.gitignore new file mode 100644 index 000000000..16308b38c --- /dev/null +++ b/pkg/framework/test/.gitignore @@ -0,0 +1 @@ +assets/bin diff --git a/pkg/framework/test/apiserver.go b/pkg/framework/test/apiserver.go new file mode 100644 index 000000000..785c5cc14 --- /dev/null +++ b/pkg/framework/test/apiserver.go @@ -0,0 +1,88 @@ +package test + +import ( + "fmt" + "os/exec" + "time" + + "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +// APIServer knows how to run a kubernetes apiserver. Set it up with the path to a precompiled binary. +type APIServer struct { + // The path to the apiserver binary + Path string + EtcdURL string + session *gexec.Session + stdOut *gbytes.Buffer + stdErr *gbytes.Buffer + certDirManager certDirManager +} + +type certDirManager interface { + Create() (string, error) + Destroy() error +} + +// Start starts the apiserver, waits for it to come up, and returns an error, if occoured. 
+func (s *APIServer) Start() error { + s.certDirManager = NewTempDirManager() + s.stdOut = gbytes.NewBuffer() + s.stdErr = gbytes.NewBuffer() + + certDir, err := s.certDirManager.Create() + if err != nil { + return err + } + + args := []string{ + "--authorization-mode=Node,RBAC", + "--runtime-config=admissionregistration.k8s.io/v1alpha1", + "--v=3", "--vmodule=", + "--admission-control=Initializers,NamespaceLifecycle,LimitRanger,ServiceAccount,SecurityContextDeny,DefaultStorageClass,DefaultTolerationSeconds,GenericAdmissionWebhook,ResourceQuota", + "--admission-control-config-file=", + "--bind-address=0.0.0.0", + "--insecure-bind-address=127.0.0.1", + "--insecure-port=8080", + "--storage-backend=etcd3", + fmt.Sprintf("--etcd-servers=%s", s.EtcdURL), + fmt.Sprintf("--cert-dir=%s", certDir), + } + + detectedStart := s.stdErr.Detect("Serving insecurely on 127.0.0.1:8080") + timedOut := time.After(20 * time.Second) + + command := exec.Command(s.Path, args...) + s.session, err = gexec.Start(command, s.stdOut, s.stdErr) + if err != nil { + return err + } + + select { + case <-detectedStart: + return nil + case <-timedOut: + return fmt.Errorf("timeout waiting for apiserver to start serving") + } +} + +// Stop stops this process gracefully, waits for its termination, and cleans up the cert directory. +func (s *APIServer) Stop() { + if s.session != nil { + s.session.Terminate().Wait(20 * time.Second) + err := s.certDirManager.Destroy() + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + } +} + +// ExitCode returns the exit code of the process, if it has exited. If it hasn't exited yet, ExitCode returns -1. +func (s *APIServer) ExitCode() int { + return s.session.ExitCode() +} + +// Buffer implements the gbytes.BufferProvider interface and returns the stdout of the process +func (s *APIServer) Buffer() *gbytes.Buffer { + return s.session.Buffer() +} diff --git a/pkg/framework/test/apiserver_test.go b/pkg/framework/test/apiserver_test.go new file mode 100644 index 000000000..2efcf50e1 --- /dev/null +++ b/pkg/framework/test/apiserver_test.go @@ -0,0 +1,62 @@ +package test_test + +import ( + . "k8s.io/kubectl/pkg/framework/test" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Apiserver", func() { + + Context("when given a path to a binary that runs for a long time", func() { + It("can start and stop that binary", func() { + pathToFakeAPIServer, err := gexec.Build("k8s.io/kubectl/pkg/framework/test/assets/fakeapiserver") + Expect(err).NotTo(HaveOccurred()) + apiServer := &APIServer{ + Path: pathToFakeAPIServer, + EtcdURL: "the etcd url", + } + + By("Starting the API Server") + err = apiServer.Start() + Expect(err).NotTo(HaveOccurred()) + + Eventually(apiServer).Should(gbytes.Say("Everything is fine")) + Expect(apiServer).NotTo(gexec.Exit()) + + By("Stopping the API Server") + apiServer.Stop() + Expect(apiServer).To(gexec.Exit(143)) + }) + + }) + + Context("when no path is given", func() { + It("fails with a helpful error", func() { + apiServer := &APIServer{} + err := apiServer.Start() + Expect(err).To(MatchError(ContainSubstring("no such file or directory"))) + }) + }) + + Context("when given a path to a non-executable", func() { + It("fails with a helpful error", func() { + apiServer := &APIServer{ + Path: "./apiserver.go", + EtcdURL: "the etcd url", + } + err := apiServer.Start() + Expect(err).To(MatchError(ContainSubstring("./apiserver.go: permission denied"))) + }) + }) + + Context("when we try to stop a server that hasn't been started", func() { + It("does not panic", func() { + server := &APIServer{} + server.Stop() + }) + }) +}) diff --git a/pkg/framework/test/assets/bin/.gitkeep b/pkg/framework/test/assets/bin/.gitkeep new file mode 100644 index 000000000..172cb3f18 --- /dev/null +++ b/pkg/framework/test/assets/bin/.gitkeep @@ -0,0 +1 @@ +This directory will be the home of some binaries which are downloaded with `pkg/framework/test/scripts/download-binaries`. 
diff --git a/pkg/framework/test/assets/fakeapiserver/apiserver.go b/pkg/framework/test/assets/fakeapiserver/apiserver.go new file mode 100644 index 000000000..47bbf8b7e --- /dev/null +++ b/pkg/framework/test/assets/fakeapiserver/apiserver.go @@ -0,0 +1,44 @@ +package main + +import ( + "fmt" + "os" + "regexp" + "time" +) + +func main() { + expectedArgs := []*regexp.Regexp{ + regexp.MustCompile("^--authorization-mode=Node,RBAC$"), + regexp.MustCompile("^--runtime-config=admissionregistration.k8s.io/v1alpha1$"), + regexp.MustCompile("^--v=3$"), + regexp.MustCompile("^--vmodule=$"), + regexp.MustCompile("^--admission-control=Initializers,NamespaceLifecycle,LimitRanger,ServiceAccount,SecurityContextDeny,DefaultStorageClass,DefaultTolerationSeconds,GenericAdmissionWebhook,ResourceQuota$"), + regexp.MustCompile("^--admission-control-config-file=$"), + regexp.MustCompile("^--bind-address=0.0.0.0$"), + regexp.MustCompile("^--insecure-bind-address=127.0.0.1$"), + regexp.MustCompile("^--insecure-port=8080$"), + regexp.MustCompile("^--storage-backend=etcd3$"), + regexp.MustCompile("^--etcd-servers=the etcd url$"), + regexp.MustCompile("^--cert-dir=.*"), + } + numExpectedArgs := len(expectedArgs) + numGivenArgs := len(os.Args) - 1 + + if numGivenArgs < numExpectedArgs { + fmt.Printf("Expected at least %d args, only got %d\n", numExpectedArgs, numGivenArgs) + os.Exit(2) + } + + for i, argRegexp := range expectedArgs { + givenArg := os.Args[i+1] + if !argRegexp.MatchString(givenArg) { + fmt.Printf("Expected arg '%s' to match '%s'\n", givenArg, argRegexp.String()) + os.Exit(1) + } + } + fmt.Println("Everything is fine") + fmt.Fprintln(os.Stderr, "Serving insecurely on 127.0.0.1:8080") + + time.Sleep(10 * time.Minute) +} diff --git a/pkg/framework/test/assets/fakeetcd/etcd.go b/pkg/framework/test/assets/fakeetcd/etcd.go new file mode 100644 index 000000000..d06704ce9 --- /dev/null +++ b/pkg/framework/test/assets/fakeetcd/etcd.go @@ -0,0 +1,39 @@ +package main + +import ( + "fmt" + "os" + "regexp" + "time" +) + +func main() { + expectedArgs := []*regexp.Regexp{ + regexp.MustCompile("^--debug$"), + regexp.MustCompile("^--advertise-client-urls$"), + regexp.MustCompile("^our etcd url$"), + regexp.MustCompile("^--listen-client-urls$"), + regexp.MustCompile("^our etcd url$"), + regexp.MustCompile("^--data-dir$"), + regexp.MustCompile("^.+"), + } + numExpectedArgs := len(expectedArgs) + numGivenArgs := len(os.Args) - 1 + + if numGivenArgs < numExpectedArgs { + fmt.Printf("Expected at least %d args, only got %d\n", numExpectedArgs, numGivenArgs) + os.Exit(2) + } + + for i, argRegexp := range expectedArgs { + givenArg := os.Args[i+1] + if !argRegexp.MatchString(givenArg) { + fmt.Printf("Expected arg '%s' to match '%s'\n", givenArg, argRegexp.String()) + os.Exit(1) + } + } + fmt.Println("Everything is dandy") + fmt.Fprintln(os.Stderr, "serving insecure client requests on 127.0.0.1:2379") + + time.Sleep(10 * time.Minute) +} diff --git a/pkg/framework/test/ci/pipeline.yml b/pkg/framework/test/ci/pipeline.yml new file mode 100644 index 000000000..5acdc7765 --- /dev/null +++ b/pkg/framework/test/ci/pipeline.yml @@ -0,0 +1,75 @@ + +jobs: +- name: test-dev-branch + public: true + serial: true + plan: + - get: git-kubectl-dev + trigger: true + - task: run-tests + config: + platform: linux + image_resource: + type: docker-image + source: + repository: golang + tag: 1.9 + inputs: + - name: git-kubectl-dev + path: go/src/k8s.io/kubectl + run: + path: /bin/bash + args: + - -c + - | + #!/usr/bin/env bash + set -eux + chown -R 
nobody:nogroup "${PWD}/go" + + cat <<'EOS' | su -c bash -s /bin/bash nobody + set -eux + export GOPATH="${PWD}/go" + export PATH="${PATH}:/usr/local/go/bin:${GOPATH}/bin" + go get github.com/onsi/ginkgo/ginkgo + "${GOPATH}/src/k8s.io/kubectl/pkg/framework/test/scripts/download-binaries.sh" + GINKGO_PERFORMANCE=1 "${GOPATH}/src/k8s.io/kubectl/pkg/framework/test/scripts/run-tests.sh" + EOS +- name: push-to-prod-branch + serial: true + plan: + - get: git-kubectl-dev + trigger: true + passed: + - test-dev-branch + - put: git-kubectl-pair2 + params: + repository: git-kubectl-dev + force: true + - put: git-kubectl-pair1 + params: + repository: git-kubectl-dev + force: true + + +resources: +- name: git-kubectl-dev + type: git + source: + uri: {{git-dev-url}} # git@github.com:totherme/kubectl + branch: test-framework-dev + private_key: {{git-dev-private-key}} + ignore_paths: [pkg/framework/test/ci] + +- name: git-kubectl-pair1 + type: git + source: + uri: {{git-pair1-url}} #git@github.com:totherme/kubectl + branch: test-framework + private_key: {{git-pair1-private-key}} + +- name: git-kubectl-pair2 + type: git + source: + uri: {{git-pair2-url}} #git@github.com:hoegaarden/kubectl + branch: test-framework + private_key: {{git-pair2-private-key}} diff --git a/pkg/framework/test/ci/set-pipeline.sh b/pkg/framework/test/ci/set-pipeline.sh new file mode 100755 index 000000000..5465d57be --- /dev/null +++ b/pkg/framework/test/ci/set-pipeline.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +set -eu + +# Use DEBUG=1 ./set-pipeline.sh to get debug output +[[ -z "${DEBUG:-""}" ]] || set -x + +# Use CONCOURSE_TARGET=my-concourse ./set-pipeline.sh to connect to your local concourse +: "${CONCOURSE_TARGET:="wings"}" +# Use PIPELINE_NAME=my-name ./set-pipeline.sh to give your pipeline a different name +: "${PIPELINE_NAME:="kubectl"}" + +# Use PAIR1_LASTPASS=my-lastpass-key ./set-pipeline.sh to get your github keys and URL from your lastpass entry +: "${PAIR1_LASTPASS:="oss-k8s-github-gds-keypair"}" +: "${PAIR2_LASTPASS:="oss-k8s-github-hhorl-keypair"}" + +github_pair1_key="$(lpass show "${PAIR1_LASTPASS}" --field "Private Key")" +github_pair2_key="$(lpass show "${PAIR2_LASTPASS}" --field "Private Key")" +github_pair1_url="$(lpass show "${PAIR1_LASTPASS}" --notes)" +github_pair2_url="$(lpass show "${PAIR2_LASTPASS}" --notes)" + +script_dir="$(cd "$(dirname "$0")" ; pwd)" + +# Create/Update the pipline +fly set-pipeline \ + --target="${CONCOURSE_TARGET}" \ + --pipeline="${PIPELINE_NAME}" \ + --config="${script_dir}/pipeline.yml" \ + --var=git-dev-url="${github_pair1_url}" \ + --var=git-pair1-url="${github_pair1_url}" \ + --var=git-pair2-url="${github_pair2_url}" \ + --var=git-dev-private-key="${github_pair1_key}" \ + --var=git-pair1-private-key="${github_pair1_key}" \ + --var=git-pair2-private-key="${github_pair2_key}" + +# Make the pipeline publicly available +fly expose-pipeline \ + --target="${CONCOURSE_TARGET}" \ + --pipeline="${PIPELINE_NAME}" diff --git a/pkg/framework/test/democli/cmd/listPods.go b/pkg/framework/test/democli/cmd/listPods.go new file mode 100644 index 000000000..8274c1338 --- /dev/null +++ b/pkg/framework/test/democli/cmd/listPods.go @@ -0,0 +1,65 @@ +// Copyright © 2017 NAME HERE +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" + "k8s.io/client-go/kubernetes" + "k8s.io/client-go/tools/clientcmd" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// listPodsCmd represents the listPods command +var listPodsCmd = &cobra.Command{ + Use: "listPods", + Short: "List all pods", + Long: `Give a list of all pods known by the system`, + Run: func(cmd *cobra.Command, args []string) { + runGetPods() + }, +} + +func runGetPods() { + config, _ := clientcmd.BuildConfigFromFlags("http://localhost:8080", "") + + // create the clientset + clientset, _ := kubernetes.NewForConfig(config) + + pods, err := clientset.CoreV1().Pods("").List(metav1.ListOptions{}) + if err != nil { + panic(err) + } + if len(pods.Items) > 0 { + } else { + fmt.Println("There are no pods.") + } +} + +func init() { + RootCmd.AddCommand(listPodsCmd) + + // Here you will define your flags and configuration settings. + + // Cobra supports Persistent Flags which will work for this command + // and all subcommands, e.g.: + // listPodsCmd.PersistentFlags().String("foo", "", "A help for foo") + + // Cobra supports local flags which will only run when this command + // is called directly, e.g.: + // listPodsCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") +} diff --git a/pkg/framework/test/democli/cmd/root.go b/pkg/framework/test/democli/cmd/root.go new file mode 100644 index 000000000..db95f7a8a --- /dev/null +++ b/pkg/framework/test/democli/cmd/root.go @@ -0,0 +1,86 @@ +// Copyright © 2017 NAME HERE +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "fmt" + "os" + + homedir "github.com/mitchellh/go-homedir" + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +var cfgFile string + +// RootCmd represents the base command when called without any subcommands +var RootCmd = &cobra.Command{ + Use: "democli", + Short: "A demo CLI application", + Long: `This is a demo kubernetes CLI, which interacts with the kubernetes API. + +The purpose of this CLI is to demo the testing framework that was used to develop it.`, + // Uncomment the following line if your bare application + // has an action associated with it: + // Run: func(cmd *cobra.Command, args []string) { }, +} + +// Execute adds all child commands to the root command and sets flags appropriately. +// This is called by main.main(). It only needs to happen once to the rootCmd. 
+func Execute() { + if err := RootCmd.Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } +} + +func init() { + cobra.OnInitialize(initConfig) + + // Here you will define your flags and configuration settings. + // Cobra supports persistent flags, which, if defined here, + // will be global for your application. + RootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.democli.yaml)") + + // Cobra also supports local flags, which will only run + // when this action is called directly. + RootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") +} + +// initConfig reads in config file and ENV variables if set. +func initConfig() { + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := homedir.Dir() + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + // Search config in home directory with name ".democli" (without extension). + viper.AddConfigPath(home) + viper.SetConfigName(".democli") + } + + viper.AutomaticEnv() // read in environment variables that match + + // If a config file is found, read it in. + if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } +} diff --git a/pkg/framework/test/democli/integration/integration_suite_test.go b/pkg/framework/test/democli/integration/integration_suite_test.go new file mode 100644 index 000000000..a10a43ffb --- /dev/null +++ b/pkg/framework/test/democli/integration/integration_suite_test.go @@ -0,0 +1,42 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" + + "os" + + "path/filepath" + + "github.com/onsi/gomega/gexec" + "k8s.io/kubectl/pkg/framework/test" +) + +func TestIntegration(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Integration Suite") +} + +var ( + pathToDemoCommand string + fixtures *test.Fixtures +) + +var _ = BeforeSuite(func() { + var err error + pathToDemoCommand, err = gexec.Build("k8s.io/kubectl/pkg/framework/test/democli/") + Expect(err).NotTo(HaveOccurred()) + + assetsDir, ok := os.LookupEnv("KUBE_ASSETS_DIR") + Expect(ok).To(BeTrue(), "KUBE_ASSETS_DIR should point to a directory containing etcd and apiserver binaries") + fixtures = test.NewFixtures(filepath.Join(assetsDir, "etcd"), filepath.Join(assetsDir, "kube-apiserver")) + err = fixtures.Start() + Expect(err).NotTo(HaveOccurred()) +}) + +var _ = AfterSuite(func() { + fixtures.Stop() + gexec.CleanupBuildArtifacts() +}) diff --git a/pkg/framework/test/democli/integration/integration_test.go b/pkg/framework/test/democli/integration/integration_test.go new file mode 100644 index 000000000..6a7764ce7 --- /dev/null +++ b/pkg/framework/test/democli/integration/integration_test.go @@ -0,0 +1,30 @@ +package integration_test + +import ( + "os/exec" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Integration", func() { + It("can give us a helpful help message", func() { + helpfulMessage := `This is a demo kubernetes CLI, which interacts with the kubernetes API.` + + command := exec.Command(pathToDemoCommand, "--help") + session, err := gexec.Start(command, GinkgoWriter, GinkgoWriter) + Expect(err).NotTo(HaveOccurred()) + Eventually(session).Should(gexec.Exit(0)) + Expect(session.Out).To(gbytes.Say(helpfulMessage)) + }) + + It("can get a list of pods", func() { + command := exec.Command(pathToDemoCommand, "listPods") + session, err := gexec.Start(command, GinkgoWriter, GinkgoWriter) + Expect(err).NotTo(HaveOccurred()) + Eventually(session).Should(gexec.Exit(0)) + Expect(session.Out).To(gbytes.Say("There are no pods.")) + }) +}) diff --git a/pkg/framework/test/democli/main.go b/pkg/framework/test/democli/main.go new file mode 100644 index 000000000..4dc7f4f0b --- /dev/null +++ b/pkg/framework/test/democli/main.go @@ -0,0 +1,21 @@ +// Copyright © 2017 NAME HERE +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import "k8s.io/kubectl/pkg/framework/test/democli/cmd" + +func main() { + cmd.Execute() +} diff --git a/pkg/framework/test/etcd.go b/pkg/framework/test/etcd.go new file mode 100644 index 000000000..44a52fafa --- /dev/null +++ b/pkg/framework/test/etcd.go @@ -0,0 +1,84 @@ +package test + +import ( + "fmt" + "os/exec" + "time" + + "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +// Etcd knows how to run an etcd server. Set it up with the path to a precompiled binary. +type Etcd struct { + // The path to the etcd binary + Path string + EtcdURL string + session *gexec.Session + stdOut *gbytes.Buffer + stdErr *gbytes.Buffer + dataDirManager dataDirManager +} + +type dataDirManager interface { + Create() (string, error) + Destroy() error +} + +// Start starts the etcd, waits for it to come up, and returns an error, if occoured. +func (e *Etcd) Start() error { + e.dataDirManager = NewTempDirManager() + e.stdOut = gbytes.NewBuffer() + e.stdErr = gbytes.NewBuffer() + + dataDir, err := e.dataDirManager.Create() + if err != nil { + return err + } + + args := []string{ + "--debug", + "--advertise-client-urls", + e.EtcdURL, + "--listen-client-urls", + e.EtcdURL, + "--data-dir", + dataDir, + } + + detectedStart := e.stdErr.Detect("serving insecure client requests on 127.0.0.1:2379") + timedOut := time.After(20 * time.Second) + + command := exec.Command(e.Path, args...) + e.session, err = gexec.Start(command, e.stdOut, e.stdErr) + if err != nil { + return err + } + + select { + case <-detectedStart: + return nil + case <-timedOut: + return fmt.Errorf("timeout waiting for etcd to start serving") + } +} + +// Stop stops this process gracefully, waits for its termination, and cleans up the data directory. 
+func (e *Etcd) Stop() { + if e.session != nil { + e.session.Terminate().Wait(20 * time.Second) + err := e.dataDirManager.Destroy() + gomega.Expect(err).NotTo(gomega.HaveOccurred()) + } +} + +// ExitCode returns the exit code of the process, if it has exited. If it hasn't exited yet, ExitCode returns -1. +func (e *Etcd) ExitCode() int { + return e.session.ExitCode() +} + +// Buffer implements the gbytes.BufferProvider interface and returns the stdout of the process +func (e *Etcd) Buffer() *gbytes.Buffer { + return e.session.Buffer() +} diff --git a/pkg/framework/test/etcd_test.go b/pkg/framework/test/etcd_test.go new file mode 100644 index 000000000..ded03ecac --- /dev/null +++ b/pkg/framework/test/etcd_test.go @@ -0,0 +1,61 @@ +package test_test + +import ( + . "k8s.io/kubectl/pkg/framework/test" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Etcd", func() { + + Context("when given a path to a binary that runs for a long time", func() { + It("can start and stop that binary", func() { + pathToFakeEtcd, err := gexec.Build("k8s.io/kubectl/pkg/framework/test/assets/fakeetcd") + Expect(err).NotTo(HaveOccurred()) + etcd := &Etcd{ + Path: pathToFakeEtcd, + EtcdURL: "our etcd url", + } + + By("Starting the Etcd Server") + err = etcd.Start() + Expect(err).NotTo(HaveOccurred()) + + Eventually(etcd).Should(gbytes.Say("Everything is dandy")) + Expect(etcd).NotTo(gexec.Exit()) + + By("Stopping the Etcd Server") + etcd.Stop() + Expect(etcd).To(gexec.Exit(143)) + }) + + }) + + Context("when no path is given", func() { + It("fails with a helpful error", func() { + etcd := &Etcd{} + err := etcd.Start() + Expect(err).To(MatchError(ContainSubstring("no such file or directory"))) + }) + }) + + Context("when given a path to a non-executable", func() { + It("fails with a helpful error", func() { + etcd := &Etcd{ + Path: "./etcd.go", + } + err := etcd.Start() + Expect(err).To(MatchError(ContainSubstring("./etcd.go: permission denied"))) + }) + }) + + Context("when we try to stop a server that hasn't been started", func() { + It("does not panic", func() { + etcd := &Etcd{} + etcd.Stop() + }) + }) +}) diff --git a/pkg/framework/test/fixtures.go b/pkg/framework/test/fixtures.go new file mode 100644 index 000000000..0c01a6db5 --- /dev/null +++ b/pkg/framework/test/fixtures.go @@ -0,0 +1,65 @@ +package test + +// Fixtures is a struct that knows how to start all your test fixtures. +// +// Right now, that means Etcd and your APIServer. This is likely to increase in future. +type Fixtures struct { + Etcd FixtureProcess + APIServer FixtureProcess +} + +// FixtureProcess knows how to start and stop a Fixture processes. +// This interface is potentially going to be expanded to e.g. allow access to the processes StdOut/StdErr +// and other internals. +type FixtureProcess interface { + Start() error + Stop() +} + +//go:generate counterfeiter . FixtureProcess + +// NewFixtures will give you a Fixtures struct that's properly wired together. +func NewFixtures(pathToEtcd, pathToAPIServer string) *Fixtures { + etcdURL := "http://127.0.0.1:2379" + return &Fixtures{ + Etcd: &Etcd{ + Path: pathToEtcd, + EtcdURL: etcdURL, + }, + APIServer: &APIServer{ + Path: pathToAPIServer, + EtcdURL: etcdURL, + }, + } +} + +// Start will start all your fixtures. To stop them, call Stop(). 
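+// Etcd and the APIServer are started concurrently; Start returns the first
+// error reported by either process, or nil once both are up.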
+func (f *Fixtures) Start() error { + started := make(chan error) + starter := func(process FixtureProcess) { + started <- process.Start() + } + processes := []FixtureProcess{ + f.Etcd, + f.APIServer, + } + + for _, process := range processes { + go starter(process) + } + + for pendingProcesses := len(processes); pendingProcesses > 0; pendingProcesses-- { + if err := <-started; err != nil { + return err + } + } + + return nil +} + +// Stop will stop all your fixtures, and clean up their data. +func (f *Fixtures) Stop() error { + f.APIServer.Stop() + f.Etcd.Stop() + return nil +} diff --git a/pkg/framework/test/fixtures_test.go b/pkg/framework/test/fixtures_test.go new file mode 100644 index 000000000..ee542dcb3 --- /dev/null +++ b/pkg/framework/test/fixtures_test.go @@ -0,0 +1,71 @@ +package test_test + +import ( + . "k8s.io/kubectl/pkg/framework/test" + + "fmt" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "k8s.io/kubectl/pkg/framework/test/testfakes" +) + +var _ = Describe("Fixtures", func() { + It("can construct a properly wired Fixtures struct", func() { + f := NewFixtures("path to etcd", "path to apiserver") + Expect(f.Etcd.(*Etcd).Path).To(Equal("path to etcd")) + Expect(f.APIServer.(*APIServer).Path).To(Equal("path to apiserver")) + }) + + Context("with a properly configured set of Fixtures", func() { + var ( + fakeEtcdProcess *testfakes.FakeFixtureProcess + fakeAPIServerProcess *testfakes.FakeFixtureProcess + fixtures Fixtures + ) + BeforeEach(func() { + fakeEtcdProcess = &testfakes.FakeFixtureProcess{} + fakeAPIServerProcess = &testfakes.FakeFixtureProcess{} + fixtures = Fixtures{ + Etcd: fakeEtcdProcess, + APIServer: fakeAPIServerProcess, + } + }) + + It("can start them", func() { + err := fixtures.Start() + Expect(err).NotTo(HaveOccurred()) + + By("starting Etcd") + Expect(fakeEtcdProcess.StartCallCount()).To(Equal(1), + "the EtcdStartStopper should be called exactly once") + + By("starting APIServer") + Expect(fakeAPIServerProcess.StartCallCount()).To(Equal(1), + "the APIServerStartStopper should be called exactly once") + }) + + Context("when starting etcd fails", func() { + It("wraps the error", func() { + fakeEtcdProcess.StartReturns(fmt.Errorf("some error")) + err := fixtures.Start() + Expect(err).To(MatchError(ContainSubstring("some error"))) + }) + }) + + Context("when starting APIServer fails", func() { + It("wraps the error", func() { + fakeAPIServerProcess.StartReturns(fmt.Errorf("another error")) + err := fixtures.Start() + Expect(err).To(MatchError(ContainSubstring("another error"))) + }) + }) + + It("can can clean up the temporary directory and stop", func() { + fixtures.Stop() + Expect(fakeEtcdProcess.StopCallCount()).To(Equal(1)) + Expect(fakeAPIServerProcess.StopCallCount()).To(Equal(1)) + }) + + }) +}) diff --git a/pkg/framework/test/integration/integration_suite_test.go b/pkg/framework/test/integration/integration_suite_test.go new file mode 100644 index 000000000..37dd60fc1 --- /dev/null +++ b/pkg/framework/test/integration/integration_suite_test.go @@ -0,0 +1,35 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + + "testing" + + "os" + "path/filepath" + + "github.com/onsi/gomega/gexec" +) + +func TestIntegration(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Integration Suite") +} + +var ( + defaultPathToEtcd string + defaultPathToApiserver string +) + +var _ = BeforeSuite(func() { + assetsDir, ok := os.LookupEnv("KUBE_ASSETS_DIR") + Expect(ok).To(BeTrue(), "Expected $KUBE_ASSETS_DIR to be set") + + defaultPathToEtcd = filepath.Join(assetsDir, "etcd") + defaultPathToApiserver = filepath.Join(assetsDir, "kube-apiserver") +}) + +var _ = AfterSuite(func() { + gexec.TerminateAndWait() +}) diff --git a/pkg/framework/test/integration/integration_test.go b/pkg/framework/test/integration/integration_test.go new file mode 100644 index 000000000..189da0442 --- /dev/null +++ b/pkg/framework/test/integration/integration_test.go @@ -0,0 +1,58 @@ +package integration_test + +import ( + "fmt" + "net" + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "k8s.io/kubectl/pkg/framework/test" +) + +var _ = Describe("Integration", func() { + It("Successfully manages the fixtures lifecycle", func() { + fixtures := test.NewFixtures(defaultPathToEtcd, defaultPathToApiserver) + + err := fixtures.Start() + Expect(err).NotTo(HaveOccurred(), "Expected fixtures to start successfully") + + isEtcdListening := isSomethingListeningOnPort(2379) + isAPIServerListening := isSomethingListeningOnPort(8080) + + Expect(isEtcdListening()).To(BeTrue(), "Expected Etcd to listen on 2379") + + Expect(isAPIServerListening()).To(BeTrue(), "Expected APIServer to listen on 8080") + + err = fixtures.Stop() + Expect(err).NotTo(HaveOccurred(), "Expected fixtures to stop successfully") + + Expect(isEtcdListening()).To(BeFalse(), "Expected Etcd not to listen anymore") + + By("Ensuring APIServer is not listening anymore") + Expect(isAPIServerListening()).To(BeFalse(), "Expected APIServer not to listen anymore") + }) + + Measure("It should be fast to bring up and tear down the fixtures", func(b Benchmarker) { + b.Time("lifecycle", func() { + fixtures := test.NewFixtures(defaultPathToEtcd, defaultPathToApiserver) + + fixtures.Start() + fixtures.Stop() + }) + }, 10) +}) + +type portChecker func() bool + +func isSomethingListeningOnPort(port int) portChecker { + return func() bool { + conn, err := net.DialTimeout("tcp", net.JoinHostPort("", fmt.Sprintf("%d", port)), 1*time.Second) + + if err != nil { + return false + } + conn.Close() + return true + } +} diff --git a/pkg/framework/test/scripts/download-binaries.sh b/pkg/framework/test/scripts/download-binaries.sh new file mode 100755 index 000000000..5822f9c26 --- /dev/null +++ b/pkg/framework/test/scripts/download-binaries.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +set -eu + +# Use DEBUG=1 ./scripts/download-binaries.sh to get debug output +quiet="--quiet" +[[ -z "${DEBUG:-""}" ]] || { + set -x + quiet="" +} + +# Use BASE_URL=https://my/binaries/url ./scripts/download-binaries to download +# from a different bucket +: "${BASE_URL:="https://storage.googleapis.com/k8s-c10s-test-binaries"}" + +test_framework_dir="$(cd "$(dirname "$0")/.." ; pwd)" +os="$(uname -s)" +arch="$(uname -m)" + +echo "About to download a couple of binaries. This might take a while..." 
+wget $quiet "${BASE_URL}/etcd-${os}-${arch}" -O "${test_framework_dir}/assets/bin/etcd" +wget $quiet "${BASE_URL}/kube-apiserver-${os}-${arch}" -O "${test_framework_dir}/assets/bin/kube-apiserver" +chmod +x "${test_framework_dir}/assets/bin/etcd" +chmod +x "${test_framework_dir}/assets/bin/kube-apiserver" +echo "Done!" diff --git a/pkg/framework/test/scripts/run-tests.sh b/pkg/framework/test/scripts/run-tests.sh new file mode 100755 index 000000000..cf2e67602 --- /dev/null +++ b/pkg/framework/test/scripts/run-tests.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +set -eu + +# Use DEBUG=1 ./scripts/run-tests.sh to get debug output +[[ -z "${DEBUG:-""}" ]] || set -x + +GINKGO="ginkgo" +if [[ -n "${GINKGO_WATCH:-""}" ]] ; then + GINKGO="$GINKGO watch" +fi + +if [[ -z ${GINKGO_PERFORMANCE:-""} ]] ; then + GINKGO="$GINKGO -skipMeasurements" +fi + +test_framework_dir="$(cd "$(dirname "$0")/.." ; pwd)" + +export KUBE_ASSETS_DIR="${test_framework_dir}/assets/bin" + +$GINKGO -r "${test_framework_dir}" diff --git a/pkg/framework/test/temp_dir_manager.go b/pkg/framework/test/temp_dir_manager.go new file mode 100644 index 000000000..232bb9892 --- /dev/null +++ b/pkg/framework/test/temp_dir_manager.go @@ -0,0 +1,49 @@ +package test + +import ( + "io/ioutil" + "os" +) + +// TempDirMaker can create directories. +type TempDirMaker func(dir, prefix string) (name string, err error) + +// TempDirRemover can delete directories +type TempDirRemover func(dir string) error + +// NewTempDirManager returns a new manager for creation and deleteion of temporary directories. +func NewTempDirManager() *TempDirManager { + return &TempDirManager{ + Maker: ioutil.TempDir, + Remover: os.RemoveAll, + } +} + +// TempDirManager knows when to call the directory maker and remover and keeps track of created directories. +type TempDirManager struct { + Maker TempDirMaker + Remover TempDirRemover + dir string +} + +// Create knows how to create a temporary directory and how to keep track of it. +func (t *TempDirManager) Create() (string, error) { + if t.dir == "" { + dir, err := t.Maker("", "kube-test-framework-") + if err != nil { + return "", err + } + t.dir = dir + } + return t.dir, nil +} + +// Destroy knows how to destroy a previously created directory. +func (t *TempDirManager) Destroy() error { + if t.dir != "" { + err := t.Remover(t.dir) + t.dir = "" + return err + } + return nil +} diff --git a/pkg/framework/test/temp_dir_manager_test.go b/pkg/framework/test/temp_dir_manager_test.go new file mode 100644 index 000000000..a070b4b5b --- /dev/null +++ b/pkg/framework/test/temp_dir_manager_test.go @@ -0,0 +1,111 @@ +package test_test + +import ( + "fmt" + + . "k8s.io/kubectl/pkg/framework/test" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("TempDirManager", func() { + var ( + manager *TempDirManager + removerError error + createError error + managedDirCount int + separateDirCounter int + ) + BeforeEach(func() { + managedDirCount = 0 + separateDirCounter = 0 + createError = nil + removerError = nil + manager = NewTempDirManager() + manager.Maker = func(dir, prefix string) (string, error) { + managedDirCount += 1 + separateDirCounter += 1 + return fmt.Sprintf("%d-%s-%s", separateDirCounter, dir, prefix), createError + } + manager.Remover = func(dir string) error { + managedDirCount -= 1 + return removerError + } + }) + + It("can creates and remove directories", func() { + Expect(managedDirCount).To(Equal(0)) + manager.Create() + Expect(managedDirCount).To(Equal(1)) + manager.Destroy() + Expect(managedDirCount).To(Equal(0)) + }) + + Context("when I call Create() multiple times on the same manager", func() { + It("returns the same directory every time", func() { + var dir1, dir2 string + var err error + + Expect(managedDirCount).To(Equal(0)) + + dir1, err = manager.Create() + Expect(err).NotTo(HaveOccurred()) + Expect(managedDirCount).To(Equal(1)) + + dir2, err = manager.Create() + Expect(err).NotTo(HaveOccurred()) + Expect(managedDirCount).To(Equal(1)) + Expect(dir1).To(Equal(dir2)) + }) + + It("deletes the managed directory as soon as Destroy() is called even once", func() { + var err error + + Expect(managedDirCount).To(Equal(0)) + + _, err = manager.Create() + Expect(err).NotTo(HaveOccurred()) + _, err = manager.Create() + Expect(err).NotTo(HaveOccurred()) + Expect(managedDirCount).To(Equal(1)) + + manager.Destroy() + Expect(managedDirCount).To(Equal(0)) + }) + }) + + Context("when I call Destroy() without calling create first", func() { + It("does nothing", func() { + Expect(managedDirCount).To(Equal(0)) + manager.Destroy() + Expect(managedDirCount).To(Equal(0)) + }) + }) + + Context("when the remover returns an error", func() { + JustBeforeEach(func() { + removerError = fmt.Errorf("Error on removing dir") + }) + It("handles that error depending on whether Create() has been called", func() { + By("avoiding the error if Create() has not been called") + err := manager.Destroy() + Expect(err).NotTo(HaveOccurred()) + + By("propagating the error if Create() has been called") + manager.Create() + err = manager.Destroy() + Expect(err).To(MatchError("Error on removing dir")) + }) + }) + + Context("when the creater returns an error", func() { + JustBeforeEach(func() { + createError = fmt.Errorf("Error on creating dir") + }) + It("bubbles up the error", func() { + _, err := manager.Create() + Expect(err).To(MatchError("Error on creating dir")) + }) + }) +}) diff --git a/pkg/framework/test/test_suite_test.go b/pkg/framework/test/test_suite_test.go new file mode 100644 index 000000000..76e998ced --- /dev/null +++ b/pkg/framework/test/test_suite_test.go @@ -0,0 +1,13 @@ +package test_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestTest(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Test Suite") +} diff --git a/pkg/framework/test/testfakes/fake_fixture_process.go b/pkg/framework/test/testfakes/fake_fixture_process.go new file mode 100644 index 000000000..9b0c0cb21 --- /dev/null +++ b/pkg/framework/test/testfakes/fake_fixture_process.go @@ -0,0 +1,109 @@ +// Code generated by counterfeiter. DO NOT EDIT. 
+package testfakes + +import ( + "sync" + + "k8s.io/kubectl/pkg/framework/test" +) + +type FakeFixtureProcess struct { + StartStub func() error + startMutex sync.RWMutex + startArgsForCall []struct{} + startReturns struct { + result1 error + } + startReturnsOnCall map[int]struct { + result1 error + } + StopStub func() + stopMutex sync.RWMutex + stopArgsForCall []struct{} + invocations map[string][][]interface{} + invocationsMutex sync.RWMutex +} + +func (fake *FakeFixtureProcess) Start() error { + fake.startMutex.Lock() + ret, specificReturn := fake.startReturnsOnCall[len(fake.startArgsForCall)] + fake.startArgsForCall = append(fake.startArgsForCall, struct{}{}) + fake.recordInvocation("Start", []interface{}{}) + fake.startMutex.Unlock() + if fake.StartStub != nil { + return fake.StartStub() + } + if specificReturn { + return ret.result1 + } + return fake.startReturns.result1 +} + +func (fake *FakeFixtureProcess) StartCallCount() int { + fake.startMutex.RLock() + defer fake.startMutex.RUnlock() + return len(fake.startArgsForCall) +} + +func (fake *FakeFixtureProcess) StartReturns(result1 error) { + fake.StartStub = nil + fake.startReturns = struct { + result1 error + }{result1} +} + +func (fake *FakeFixtureProcess) StartReturnsOnCall(i int, result1 error) { + fake.StartStub = nil + if fake.startReturnsOnCall == nil { + fake.startReturnsOnCall = make(map[int]struct { + result1 error + }) + } + fake.startReturnsOnCall[i] = struct { + result1 error + }{result1} +} + +func (fake *FakeFixtureProcess) Stop() { + fake.stopMutex.Lock() + fake.stopArgsForCall = append(fake.stopArgsForCall, struct{}{}) + fake.recordInvocation("Stop", []interface{}{}) + fake.stopMutex.Unlock() + if fake.StopStub != nil { + fake.StopStub() + } +} + +func (fake *FakeFixtureProcess) StopCallCount() int { + fake.stopMutex.RLock() + defer fake.stopMutex.RUnlock() + return len(fake.stopArgsForCall) +} + +func (fake *FakeFixtureProcess) Invocations() map[string][][]interface{} { + fake.invocationsMutex.RLock() + defer fake.invocationsMutex.RUnlock() + fake.startMutex.RLock() + defer fake.startMutex.RUnlock() + fake.stopMutex.RLock() + defer fake.stopMutex.RUnlock() + copiedInvocations := map[string][][]interface{}{} + for key, value := range fake.invocations { + copiedInvocations[key] = value + } + return copiedInvocations +} + +func (fake *FakeFixtureProcess) recordInvocation(key string, args []interface{}) { + fake.invocationsMutex.Lock() + defer fake.invocationsMutex.Unlock() + if fake.invocations == nil { + fake.invocations = map[string][][]interface{}{} + } + if fake.invocations[key] == nil { + fake.invocations[key] = [][]interface{}{} + } + fake.invocations[key] = append(fake.invocations[key], args) +} + +var _ test.FixtureProcess = new(FakeFixtureProcess) diff --git a/vendor/github.com/emicklei/go-restful-swagger12/.travis.yml b/vendor/github.com/emicklei/go-restful-swagger12/.travis.yml new file mode 100644 index 000000000..c74e4fa57 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/.travis.yml @@ -0,0 +1,4 @@ +language: go + +go: + - 1.x \ No newline at end of file diff --git a/vendor/github.com/emicklei/go-restful-swagger12/CHANGES.md b/vendor/github.com/emicklei/go-restful-swagger12/CHANGES.md new file mode 100644 index 000000000..213b8e7b3 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/CHANGES.md @@ -0,0 +1,46 @@ +Change history of swagger += +2017-01-30 +- moved from go-restful/swagger to go-restful-swagger12 + +2015-10-16 +- add type override mechanism for swagger models 
(MR 254, nathanejohnson) +- replace uses of wildcard in generated apidocs (issue 251) + +2015-05-25 +- (api break) changed the type of Properties in Model +- (api break) changed the type of Models in ApiDeclaration +- (api break) changed the parameter type of PostBuildDeclarationMapFunc + +2015-04-09 +- add ModelBuildable interface for customization of Model + +2015-03-17 +- preserve order of Routes per WebService in Swagger listing +- fix use of $ref and type in Swagger models +- add api version to listing + +2014-11-14 +- operation parameters are now sorted using ordering path,query,form,header,body + +2014-11-12 +- respect omitempty tag value for embedded structs +- expose ApiVersion of WebService to Swagger ApiDeclaration + +2014-05-29 +- (api add) Ability to define custom http.Handler to serve swagger-ui static files + +2014-05-04 +- (fix) include model for array element type of response + +2014-01-03 +- (fix) do not add primitive type to the Api models + +2013-11-27 +- (fix) make Swagger work for WebServices with root ("/" or "") paths + +2013-10-29 +- (api add) package variable LogInfo to customize logging function + +2013-10-15 +- upgraded to spec version 1.2 (https://github.com/wordnik/swagger-core/wiki/1.2-transition) \ No newline at end of file diff --git a/vendor/github.com/emicklei/go-restful-swagger12/LICENSE b/vendor/github.com/emicklei/go-restful-swagger12/LICENSE new file mode 100644 index 000000000..aeab5b440 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2017 Ernest Micklei + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/emicklei/go-restful-swagger12/README.md b/vendor/github.com/emicklei/go-restful-swagger12/README.md new file mode 100644 index 000000000..cad28966a --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/README.md @@ -0,0 +1,83 @@ +# go-restful-swagger12 + +[![Build Status](https://travis-ci.org/emicklei/go-restful-swagger12.png)](https://travis-ci.org/emicklei/go-restful-swagger12) +[![GoDoc](https://godoc.org/github.com/emicklei/go-restful-swagger12?status.svg)](https://godoc.org/github.com/emicklei/go-restful-swagger12) + +How to use Swagger UI with go-restful += + +Get the Swagger UI sources (version 1.2 only) + + git clone https://github.com/wordnik/swagger-ui.git + +The project contains a "dist" folder. +Its contents has all the Swagger UI files you need. 
+ +The `index.html` has an `url` set to `http://petstore.swagger.wordnik.com/api/api-docs`. +You need to change that to match your WebService JSON endpoint e.g. `http://localhost:8080/apidocs.json` + +Now, you can install the Swagger WebService for serving the Swagger specification in JSON. + + config := swagger.Config{ + WebServices: restful.RegisteredWebServices(), + ApiPath: "/apidocs.json", + SwaggerPath: "/apidocs/", + SwaggerFilePath: "/Users/emicklei/Projects/swagger-ui/dist"} + swagger.InstallSwaggerService(config) + + +Documenting Structs +-- + +Currently there are 2 ways to document your structs in the go-restful Swagger. + +###### By using struct tags +- Use tag "description" to annotate a struct field with a description to show in the UI +- Use tag "modelDescription" to annotate the struct itself with a description to show in the UI. The tag can be added in an field of the struct and in case that there are multiple definition, they will be appended with an empty line. + +###### By using the SwaggerDoc method +Here is an example with an `Address` struct and the documentation for each of the fields. The `""` is a special entry for **documenting the struct itself**. + + type Address struct { + Country string `json:"country,omitempty"` + PostCode int `json:"postcode,omitempty"` + } + + func (Address) SwaggerDoc() map[string]string { + return map[string]string{ + "": "Address doc", + "country": "Country doc", + "postcode": "PostCode doc", + } + } + +This example will generate a JSON like this + + { + "Address": { + "id": "Address", + "description": "Address doc", + "properties": { + "country": { + "type": "string", + "description": "Country doc" + }, + "postcode": { + "type": "integer", + "format": "int32", + "description": "PostCode doc" + } + } + } + } + +**Very Important Notes:** +- `SwaggerDoc()` is using a **NON-Pointer** receiver (e.g. func (Address) and not func (*Address)) +- The returned map should use as key the name of the field as defined in the JSON parameter (e.g. `"postcode"` and not `"PostCode"`) + +Notes +-- +- The Nickname of an Operation is automatically set by finding the name of the function. You can override it using RouteBuilder.Operation(..) +- The WebServices field of swagger.Config can be used to control which service you want to expose and document ; you can have multiple configs and therefore multiple endpoints. + +© 2017, ernestmicklei.com. MIT License. Contributions welcome. \ No newline at end of file diff --git a/vendor/github.com/emicklei/go-restful-swagger12/api_declaration_list.go b/vendor/github.com/emicklei/go-restful-swagger12/api_declaration_list.go new file mode 100644 index 000000000..9f4c3690a --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/api_declaration_list.go @@ -0,0 +1,64 @@ +package swagger + +// Copyright 2015 Ernest Micklei. All rights reserved. +// Use of this source code is governed by a license +// that can be found in the LICENSE file. + +import ( + "bytes" + "encoding/json" +) + +// ApiDeclarationList maintains an ordered list of ApiDeclaration. 
+type ApiDeclarationList struct { + List []ApiDeclaration +} + +// At returns the ApiDeclaration by its path unless absent, then ok is false +func (l *ApiDeclarationList) At(path string) (a ApiDeclaration, ok bool) { + for _, each := range l.List { + if each.ResourcePath == path { + return each, true + } + } + return a, false +} + +// Put adds or replaces a ApiDeclaration with this name +func (l *ApiDeclarationList) Put(path string, a ApiDeclaration) { + // maybe replace existing + for i, each := range l.List { + if each.ResourcePath == path { + // replace + l.List[i] = a + return + } + } + // add + l.List = append(l.List, a) +} + +// Do enumerates all the properties, each with its assigned name +func (l *ApiDeclarationList) Do(block func(path string, decl ApiDeclaration)) { + for _, each := range l.List { + block(each.ResourcePath, each) + } +} + +// MarshalJSON writes the ModelPropertyList as if it was a map[string]ModelProperty +func (l ApiDeclarationList) MarshalJSON() ([]byte, error) { + var buf bytes.Buffer + encoder := json.NewEncoder(&buf) + buf.WriteString("{\n") + for i, each := range l.List { + buf.WriteString("\"") + buf.WriteString(each.ResourcePath) + buf.WriteString("\": ") + encoder.Encode(each) + if i < len(l.List)-1 { + buf.WriteString(",\n") + } + } + buf.WriteString("}") + return buf.Bytes(), nil +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/config.go b/vendor/github.com/emicklei/go-restful-swagger12/config.go new file mode 100644 index 000000000..18f8e57d9 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/config.go @@ -0,0 +1,46 @@ +package swagger + +import ( + "net/http" + "reflect" + + "github.com/emicklei/go-restful" +) + +// PostBuildDeclarationMapFunc can be used to modify the api declaration map. +type PostBuildDeclarationMapFunc func(apiDeclarationMap *ApiDeclarationList) + +// MapSchemaFormatFunc can be used to modify typeName at definition time. +type MapSchemaFormatFunc func(typeName string) string + +// MapModelTypeNameFunc can be used to return the desired typeName for a given +// type. It will return false if the default name should be used. +type MapModelTypeNameFunc func(t reflect.Type) (string, bool) + +type Config struct { + // url where the services are available, e.g. http://localhost:8080 + // if left empty then the basePath of Swagger is taken from the actual request + WebServicesUrl string + // path where the JSON api is avaiable , e.g. /apidocs + ApiPath string + // [optional] path where the swagger UI will be served, e.g. /swagger + SwaggerPath string + // [optional] location of folder containing Swagger HTML5 application index.html + SwaggerFilePath string + // api listing is constructed from this list of restful WebServices. + WebServices []*restful.WebService + // will serve all static content (scripts,pages,images) + StaticHandler http.Handler + // [optional] on default CORS (Cross-Origin-Resource-Sharing) is enabled. + DisableCORS bool + // Top-level API version. Is reflected in the resource listing. + ApiVersion string + // If set then call this handler after building the complete ApiDeclaration Map + PostBuildHandler PostBuildDeclarationMapFunc + // Swagger global info struct + Info Info + // [optional] If set, model builder should call this handler to get addition typename-to-swagger-format-field conversion. + SchemaFormatHandler MapSchemaFormatFunc + // [optional] If set, model builder should call this handler to retrieve the name for a given type. 
+ ModelTypeNameHandler MapModelTypeNameFunc +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_builder.go b/vendor/github.com/emicklei/go-restful-swagger12/model_builder.go new file mode 100644 index 000000000..d40786f25 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_builder.go @@ -0,0 +1,467 @@ +package swagger + +import ( + "encoding/json" + "reflect" + "strings" +) + +// ModelBuildable is used for extending Structs that need more control over +// how the Model appears in the Swagger api declaration. +type ModelBuildable interface { + PostBuildModel(m *Model) *Model +} + +type modelBuilder struct { + Models *ModelList + Config *Config +} + +type documentable interface { + SwaggerDoc() map[string]string +} + +// Check if this structure has a method with signature func () SwaggerDoc() map[string]string +// If it exists, retrive the documentation and overwrite all struct tag descriptions +func getDocFromMethodSwaggerDoc2(model reflect.Type) map[string]string { + if docable, ok := reflect.New(model).Elem().Interface().(documentable); ok { + return docable.SwaggerDoc() + } + return make(map[string]string) +} + +// addModelFrom creates and adds a Model to the builder and detects and calls +// the post build hook for customizations +func (b modelBuilder) addModelFrom(sample interface{}) { + if modelOrNil := b.addModel(reflect.TypeOf(sample), ""); modelOrNil != nil { + // allow customizations + if buildable, ok := sample.(ModelBuildable); ok { + modelOrNil = buildable.PostBuildModel(modelOrNil) + b.Models.Put(modelOrNil.Id, *modelOrNil) + } + } +} + +func (b modelBuilder) addModel(st reflect.Type, nameOverride string) *Model { + // Turn pointers into simpler types so further checks are + // correct. + if st.Kind() == reflect.Ptr { + st = st.Elem() + } + + modelName := b.keyFrom(st) + if nameOverride != "" { + modelName = nameOverride + } + // no models needed for primitive types + if b.isPrimitiveType(modelName) { + return nil + } + // golang encoding/json packages says array and slice values encode as + // JSON arrays, except that []byte encodes as a base64-encoded string. + // If we see a []byte here, treat it at as a primitive type (string) + // and deal with it in buildArrayTypeProperty. 
+ if (st.Kind() == reflect.Slice || st.Kind() == reflect.Array) && + st.Elem().Kind() == reflect.Uint8 { + return nil + } + // see if we already have visited this model + if _, ok := b.Models.At(modelName); ok { + return nil + } + sm := Model{ + Id: modelName, + Required: []string{}, + Properties: ModelPropertyList{}} + + // reference the model before further initializing (enables recursive structs) + b.Models.Put(modelName, sm) + + // check for slice or array + if st.Kind() == reflect.Slice || st.Kind() == reflect.Array { + b.addModel(st.Elem(), "") + return &sm + } + // check for structure or primitive type + if st.Kind() != reflect.Struct { + return &sm + } + + fullDoc := getDocFromMethodSwaggerDoc2(st) + modelDescriptions := []string{} + + for i := 0; i < st.NumField(); i++ { + field := st.Field(i) + jsonName, modelDescription, prop := b.buildProperty(field, &sm, modelName) + if len(modelDescription) > 0 { + modelDescriptions = append(modelDescriptions, modelDescription) + } + + // add if not omitted + if len(jsonName) != 0 { + // update description + if fieldDoc, ok := fullDoc[jsonName]; ok { + prop.Description = fieldDoc + } + // update Required + if b.isPropertyRequired(field) { + sm.Required = append(sm.Required, jsonName) + } + sm.Properties.Put(jsonName, prop) + } + } + + // We always overwrite documentation if SwaggerDoc method exists + // "" is special for documenting the struct itself + if modelDoc, ok := fullDoc[""]; ok { + sm.Description = modelDoc + } else if len(modelDescriptions) != 0 { + sm.Description = strings.Join(modelDescriptions, "\n") + } + + // update model builder with completed model + b.Models.Put(modelName, sm) + + return &sm +} + +func (b modelBuilder) isPropertyRequired(field reflect.StructField) bool { + required := true + if jsonTag := field.Tag.Get("json"); jsonTag != "" { + s := strings.Split(jsonTag, ",") + if len(s) > 1 && s[1] == "omitempty" { + return false + } + } + return required +} + +func (b modelBuilder) buildProperty(field reflect.StructField, model *Model, modelName string) (jsonName, modelDescription string, prop ModelProperty) { + jsonName = b.jsonNameOfField(field) + if len(jsonName) == 0 { + // empty name signals skip property + return "", "", prop + } + + if field.Name == "XMLName" && field.Type.String() == "xml.Name" { + // property is metadata for the xml.Name attribute, can be skipped + return "", "", prop + } + + if tag := field.Tag.Get("modelDescription"); tag != "" { + modelDescription = tag + } + + prop.setPropertyMetadata(field) + if prop.Type != nil { + return jsonName, modelDescription, prop + } + fieldType := field.Type + + // check if type is doing its own marshalling + marshalerType := reflect.TypeOf((*json.Marshaler)(nil)).Elem() + if fieldType.Implements(marshalerType) { + var pType = "string" + if prop.Type == nil { + prop.Type = &pType + } + if prop.Format == "" { + prop.Format = b.jsonSchemaFormat(b.keyFrom(fieldType)) + } + return jsonName, modelDescription, prop + } + + // check if annotation says it is a string + if jsonTag := field.Tag.Get("json"); jsonTag != "" { + s := strings.Split(jsonTag, ",") + if len(s) > 1 && s[1] == "string" { + stringt := "string" + prop.Type = &stringt + return jsonName, modelDescription, prop + } + } + + fieldKind := fieldType.Kind() + switch { + case fieldKind == reflect.Struct: + jsonName, prop := b.buildStructTypeProperty(field, jsonName, model) + return jsonName, modelDescription, prop + case fieldKind == reflect.Slice || fieldKind == reflect.Array: + jsonName, prop := 
b.buildArrayTypeProperty(field, jsonName, modelName) + return jsonName, modelDescription, prop + case fieldKind == reflect.Ptr: + jsonName, prop := b.buildPointerTypeProperty(field, jsonName, modelName) + return jsonName, modelDescription, prop + case fieldKind == reflect.String: + stringt := "string" + prop.Type = &stringt + return jsonName, modelDescription, prop + case fieldKind == reflect.Map: + // if it's a map, it's unstructured, and swagger 1.2 can't handle it + objectType := "object" + prop.Type = &objectType + return jsonName, modelDescription, prop + } + + fieldTypeName := b.keyFrom(fieldType) + if b.isPrimitiveType(fieldTypeName) { + mapped := b.jsonSchemaType(fieldTypeName) + prop.Type = &mapped + prop.Format = b.jsonSchemaFormat(fieldTypeName) + return jsonName, modelDescription, prop + } + modelType := b.keyFrom(fieldType) + prop.Ref = &modelType + + if fieldType.Name() == "" { // override type of anonymous structs + nestedTypeName := modelName + "." + jsonName + prop.Ref = &nestedTypeName + b.addModel(fieldType, nestedTypeName) + } + return jsonName, modelDescription, prop +} + +func hasNamedJSONTag(field reflect.StructField) bool { + parts := strings.Split(field.Tag.Get("json"), ",") + if len(parts) == 0 { + return false + } + for _, s := range parts[1:] { + if s == "inline" { + return false + } + } + return len(parts[0]) > 0 +} + +func (b modelBuilder) buildStructTypeProperty(field reflect.StructField, jsonName string, model *Model) (nameJson string, prop ModelProperty) { + prop.setPropertyMetadata(field) + // Check for type override in tag + if prop.Type != nil { + return jsonName, prop + } + fieldType := field.Type + // check for anonymous + if len(fieldType.Name()) == 0 { + // anonymous + anonType := model.Id + "." + jsonName + b.addModel(fieldType, anonType) + prop.Ref = &anonType + return jsonName, prop + } + + if field.Name == fieldType.Name() && field.Anonymous && !hasNamedJSONTag(field) { + // embedded struct + sub := modelBuilder{new(ModelList), b.Config} + sub.addModel(fieldType, "") + subKey := sub.keyFrom(fieldType) + // merge properties from sub + subModel, _ := sub.Models.At(subKey) + subModel.Properties.Do(func(k string, v ModelProperty) { + model.Properties.Put(k, v) + // if subModel says this property is required then include it + required := false + for _, each := range subModel.Required { + if k == each { + required = true + break + } + } + if required { + model.Required = append(model.Required, k) + } + }) + // add all new referenced models + sub.Models.Do(func(key string, sub Model) { + if key != subKey { + if _, ok := b.Models.At(key); !ok { + b.Models.Put(key, sub) + } + } + }) + // empty name signals skip property + return "", prop + } + // simple struct + b.addModel(fieldType, "") + var pType = b.keyFrom(fieldType) + prop.Ref = &pType + return jsonName, prop +} + +func (b modelBuilder) buildArrayTypeProperty(field reflect.StructField, jsonName, modelName string) (nameJson string, prop ModelProperty) { + // check for type override in tags + prop.setPropertyMetadata(field) + if prop.Type != nil { + return jsonName, prop + } + fieldType := field.Type + if fieldType.Elem().Kind() == reflect.Uint8 { + stringt := "string" + prop.Type = &stringt + return jsonName, prop + } + var pType = "array" + prop.Type = &pType + isPrimitive := b.isPrimitiveType(fieldType.Elem().Name()) + elemTypeName := b.getElementTypeName(modelName, jsonName, fieldType.Elem()) + prop.Items = new(Item) + if isPrimitive { + mapped := b.jsonSchemaType(elemTypeName) + prop.Items.Type = 
&mapped + } else { + prop.Items.Ref = &elemTypeName + } + // add|overwrite model for element type + if fieldType.Elem().Kind() == reflect.Ptr { + fieldType = fieldType.Elem() + } + if !isPrimitive { + b.addModel(fieldType.Elem(), elemTypeName) + } + return jsonName, prop +} + +func (b modelBuilder) buildPointerTypeProperty(field reflect.StructField, jsonName, modelName string) (nameJson string, prop ModelProperty) { + prop.setPropertyMetadata(field) + // Check for type override in tags + if prop.Type != nil { + return jsonName, prop + } + fieldType := field.Type + + // override type of pointer to list-likes + if fieldType.Elem().Kind() == reflect.Slice || fieldType.Elem().Kind() == reflect.Array { + var pType = "array" + prop.Type = &pType + isPrimitive := b.isPrimitiveType(fieldType.Elem().Elem().Name()) + elemName := b.getElementTypeName(modelName, jsonName, fieldType.Elem().Elem()) + if isPrimitive { + primName := b.jsonSchemaType(elemName) + prop.Items = &Item{Ref: &primName} + } else { + prop.Items = &Item{Ref: &elemName} + } + if !isPrimitive { + // add|overwrite model for element type + b.addModel(fieldType.Elem().Elem(), elemName) + } + } else { + // non-array, pointer type + fieldTypeName := b.keyFrom(fieldType.Elem()) + var pType = b.jsonSchemaType(fieldTypeName) // no star, include pkg path + if b.isPrimitiveType(fieldTypeName) { + prop.Type = &pType + prop.Format = b.jsonSchemaFormat(fieldTypeName) + return jsonName, prop + } + prop.Ref = &pType + elemName := "" + if fieldType.Elem().Name() == "" { + elemName = modelName + "." + jsonName + prop.Ref = &elemName + } + b.addModel(fieldType.Elem(), elemName) + } + return jsonName, prop +} + +func (b modelBuilder) getElementTypeName(modelName, jsonName string, t reflect.Type) string { + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + if t.Name() == "" { + return modelName + "." 
+ jsonName + } + return b.keyFrom(t) +} + +func (b modelBuilder) keyFrom(st reflect.Type) string { + key := st.String() + if b.Config != nil && b.Config.ModelTypeNameHandler != nil { + if name, ok := b.Config.ModelTypeNameHandler(st); ok { + key = name + } + } + if len(st.Name()) == 0 { // unnamed type + // Swagger UI has special meaning for [ + key = strings.Replace(key, "[]", "||", -1) + } + return key +} + +// see also https://golang.org/ref/spec#Numeric_types +func (b modelBuilder) isPrimitiveType(modelName string) bool { + if len(modelName) == 0 { + return false + } + return strings.Contains("uint uint8 uint16 uint32 uint64 int int8 int16 int32 int64 float32 float64 bool string byte rune time.Time", modelName) +} + +// jsonNameOfField returns the name of the field as it should appear in JSON format +// An empty string indicates that this field is not part of the JSON representation +func (b modelBuilder) jsonNameOfField(field reflect.StructField) string { + if jsonTag := field.Tag.Get("json"); jsonTag != "" { + s := strings.Split(jsonTag, ",") + if s[0] == "-" { + // empty name signals skip property + return "" + } else if s[0] != "" { + return s[0] + } + } + return field.Name +} + +// see also http://json-schema.org/latest/json-schema-core.html#anchor8 +func (b modelBuilder) jsonSchemaType(modelName string) string { + schemaMap := map[string]string{ + "uint": "integer", + "uint8": "integer", + "uint16": "integer", + "uint32": "integer", + "uint64": "integer", + + "int": "integer", + "int8": "integer", + "int16": "integer", + "int32": "integer", + "int64": "integer", + + "byte": "integer", + "float64": "number", + "float32": "number", + "bool": "boolean", + "time.Time": "string", + } + mapped, ok := schemaMap[modelName] + if !ok { + return modelName // use as is (custom or struct) + } + return mapped +} + +func (b modelBuilder) jsonSchemaFormat(modelName string) string { + if b.Config != nil && b.Config.SchemaFormatHandler != nil { + if mapped := b.Config.SchemaFormatHandler(modelName); mapped != "" { + return mapped + } + } + schemaMap := map[string]string{ + "int": "int32", + "int32": "int32", + "int64": "int64", + "byte": "byte", + "uint": "integer", + "uint8": "byte", + "float64": "double", + "float32": "float", + "time.Time": "date-time", + "*time.Time": "date-time", + } + mapped, ok := schemaMap[modelName] + if !ok { + return "" // no format + } + return mapped +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_builder_test.go b/vendor/github.com/emicklei/go-restful-swagger12/model_builder_test.go new file mode 100644 index 000000000..a2d2f126f --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_builder_test.go @@ -0,0 +1,1283 @@ +package swagger + +import ( + "encoding/xml" + "net" + "reflect" + "testing" + "time" +) + +type YesNo bool + +func (y YesNo) MarshalJSON() ([]byte, error) { + if y { + return []byte("yes"), nil + } + return []byte("no"), nil +} + +// clear && go test -v -test.run TestRef_Issue190 ...swagger +func TestRef_Issue190(t *testing.T) { + type User struct { + items []string + } + testJsonFromStruct(t, User{}, `{ + "swagger.User": { + "id": "swagger.User", + "required": [ + "items" + ], + "properties": { + "items": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }`) +} + +func TestWithoutAdditionalFormat(t *testing.T) { + type mytime struct { + time.Time + } + type usemytime struct { + t mytime + } + testJsonFromStruct(t, usemytime{}, `{ + "swagger.usemytime": { + "id": "swagger.usemytime", + 
"required": [ + "t" + ], + "properties": { + "t": { + "type": "string" + } + } + } + }`) +} + +func TestWithAdditionalFormat(t *testing.T) { + type mytime struct { + time.Time + } + type usemytime struct { + t mytime + } + testJsonFromStructWithConfig(t, usemytime{}, `{ + "swagger.usemytime": { + "id": "swagger.usemytime", + "required": [ + "t" + ], + "properties": { + "t": { + "type": "string", + "format": "date-time" + } + } + } + }`, &Config{ + SchemaFormatHandler: func(typeName string) string { + switch typeName { + case "swagger.mytime": + return "date-time" + } + return "" + }, + }) +} + +// clear && go test -v -test.run TestCustomMarshaller_Issue96 ...swagger +func TestCustomMarshaller_Issue96(t *testing.T) { + type Vote struct { + What YesNo + } + testJsonFromStruct(t, Vote{}, `{ + "swagger.Vote": { + "id": "swagger.Vote", + "required": [ + "What" + ], + "properties": { + "What": { + "type": "string" + } + } + } + }`) +} + +// clear && go test -v -test.run TestPrimitiveTypes ...swagger +func TestPrimitiveTypes(t *testing.T) { + type Prims struct { + f float64 + t time.Time + } + testJsonFromStruct(t, Prims{}, `{ + "swagger.Prims": { + "id": "swagger.Prims", + "required": [ + "f", + "t" + ], + "properties": { + "f": { + "type": "number", + "format": "double" + }, + "t": { + "type": "string", + "format": "date-time" + } + } + } + }`) +} + +// clear && go test -v -test.run TestPrimitivePtrTypes ...swagger +func TestPrimitivePtrTypes(t *testing.T) { + type Prims struct { + f *float64 + t *time.Time + b *bool + s *string + i *int + } + testJsonFromStruct(t, Prims{}, `{ + "swagger.Prims": { + "id": "swagger.Prims", + "required": [ + "f", + "t", + "b", + "s", + "i" + ], + "properties": { + "b": { + "type": "boolean" + }, + "f": { + "type": "number", + "format": "double" + }, + "i": { + "type": "integer", + "format": "int32" + }, + "s": { + "type": "string" + }, + "t": { + "type": "string", + "format": "date-time" + } + } + } + }`) +} + +// clear && go test -v -test.run TestS1 ...swagger +func TestS1(t *testing.T) { + type S1 struct { + Id string + } + testJsonFromStruct(t, S1{}, `{ + "swagger.S1": { + "id": "swagger.S1", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "string" + } + } + } + }`) +} + +// clear && go test -v -test.run TestS2 ...swagger +func TestS2(t *testing.T) { + type S2 struct { + Ids []string + } + testJsonFromStruct(t, S2{}, `{ + "swagger.S2": { + "id": "swagger.S2", + "required": [ + "Ids" + ], + "properties": { + "Ids": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }`) +} + +// clear && go test -v -test.run TestS3 ...swagger +func TestS3(t *testing.T) { + type NestedS3 struct { + Id string + } + type S3 struct { + Nested NestedS3 + } + testJsonFromStruct(t, S3{}, `{ + "swagger.NestedS3": { + "id": "swagger.NestedS3", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "string" + } + } + }, + "swagger.S3": { + "id": "swagger.S3", + "required": [ + "Nested" + ], + "properties": { + "Nested": { + "$ref": "swagger.NestedS3" + } + } + } + }`) +} + +type sample struct { + id string `swagger:"required"` // TODO + items []item + rootItem item `json:"root" description:"root desc"` +} + +type item struct { + itemName string `json:"name"` +} + +// clear && go test -v -test.run TestSampleToModelAsJson ...swagger +func TestSampleToModelAsJson(t *testing.T) { + testJsonFromStruct(t, sample{items: []item{}}, `{ + "swagger.item": { + "id": "swagger.item", + "required": [ + "name" + ], + "properties": { + "name": { + "type": 
"string" + } + } + }, + "swagger.sample": { + "id": "swagger.sample", + "required": [ + "id", + "items", + "root" + ], + "properties": { + "id": { + "type": "string" + }, + "items": { + "type": "array", + "items": { + "$ref": "swagger.item" + } + }, + "root": { + "$ref": "swagger.item", + "description": "root desc" + } + } + } + }`) +} + +func TestJsonTags(t *testing.T) { + type X struct { + A string + B string `json:"-"` + C int `json:",string"` + D int `json:","` + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "A", + "C", + "D" + ], + "properties": { + "A": { + "type": "string" + }, + "C": { + "type": "string" + }, + "D": { + "type": "integer", + "format": "int32" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +func TestJsonTagOmitempty(t *testing.T) { + type X struct { + A int `json:",omitempty"` + B int `json:"C,omitempty"` + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "properties": { + "A": { + "type": "integer", + "format": "int32" + }, + "C": { + "type": "integer", + "format": "int32" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +func TestJsonTagName(t *testing.T) { + type X struct { + A string `json:"B"` + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "B" + ], + "properties": { + "B": { + "type": "string" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +func TestAnonymousStruct(t *testing.T) { + type X struct { + A struct { + B int + } + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "A" + ], + "properties": { + "A": { + "$ref": "swagger.X.A" + } + } + }, + "swagger.X.A": { + "id": "swagger.X.A", + "required": [ + "B" + ], + "properties": { + "B": { + "type": "integer", + "format": "int32" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +func TestAnonymousPtrStruct(t *testing.T) { + type X struct { + A *struct { + B int + } + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "A" + ], + "properties": { + "A": { + "$ref": "swagger.X.A" + } + } + }, + "swagger.X.A": { + "id": "swagger.X.A", + "required": [ + "B" + ], + "properties": { + "B": { + "type": "integer", + "format": "int32" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +func TestAnonymousArrayStruct(t *testing.T) { + type X struct { + A []struct { + B int + } + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "A" + ], + "properties": { + "A": { + "type": "array", + "items": { + "$ref": "swagger.X.A" + } + } + } + }, + "swagger.X.A": { + "id": "swagger.X.A", + "required": [ + "B" + ], + "properties": { + "B": { + "type": "integer", + "format": "int32" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +func TestAnonymousPtrArrayStruct(t *testing.T) { + type X struct { + A *[]struct { + B int + } + } + + expected := `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "A" + ], + "properties": { + "A": { + "type": "array", + "items": { + "$ref": "swagger.X.A" + } + } + } + }, + "swagger.X.A": { + "id": "swagger.X.A", + "required": [ + "B" + ], + "properties": { + "B": { + "type": "integer", + "format": "int32" + } + } + } + }` + + testJsonFromStruct(t, X{}, expected) +} + +// go test -v -test.run TestEmbeddedStruct_Issue98 ...swagger +func TestEmbeddedStruct_Issue98(t *testing.T) { + type Y struct { + A int + } + type X struct { + Y + } + testJsonFromStruct(t, X{}, `{ + "swagger.X": { + "id": "swagger.X", + "required": [ + "A" + ], + "properties": { + "A": { + "type": 
"integer", + "format": "int32" + } + } + } + }`) +} + +type Dataset struct { + Names []string +} + +// clear && go test -v -test.run TestIssue85 ...swagger +func TestIssue85(t *testing.T) { + anon := struct{ Datasets []Dataset }{} + testJsonFromStruct(t, anon, `{ + "struct { Datasets ||swagger.Dataset }": { + "id": "struct { Datasets ||swagger.Dataset }", + "required": [ + "Datasets" + ], + "properties": { + "Datasets": { + "type": "array", + "items": { + "$ref": "swagger.Dataset" + } + } + } + }, + "swagger.Dataset": { + "id": "swagger.Dataset", + "required": [ + "Names" + ], + "properties": { + "Names": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + }`) +} + +type File struct { + History []File + HistoryPtrs []*File +} + +// go test -v -test.run TestRecursiveStructure ...swagger +func TestRecursiveStructure(t *testing.T) { + testJsonFromStruct(t, File{}, `{ + "swagger.File": { + "id": "swagger.File", + "required": [ + "History", + "HistoryPtrs" + ], + "properties": { + "History": { + "type": "array", + "items": { + "$ref": "swagger.File" + } + }, + "HistoryPtrs": { + "type": "array", + "items": { + "$ref": "swagger.File" + } + } + } + } + }`) +} + +type A1 struct { + B struct { + Id int + Comment string `json:"comment,omitempty"` + } +} + +// go test -v -test.run TestEmbeddedStructA1 ...swagger +func TestEmbeddedStructA1(t *testing.T) { + testJsonFromStruct(t, A1{}, `{ + "swagger.A1": { + "id": "swagger.A1", + "required": [ + "B" + ], + "properties": { + "B": { + "$ref": "swagger.A1.B" + } + } + }, + "swagger.A1.B": { + "id": "swagger.A1.B", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "integer", + "format": "int32" + }, + "comment": { + "type": "string" + } + } + } + }`) +} + +type A2 struct { + C +} +type C struct { + Id int `json:"B"` + Comment string `json:"comment,omitempty"` + Secure bool `json:"secure"` +} + +// go test -v -test.run TestEmbeddedStructA2 ...swagger +func TestEmbeddedStructA2(t *testing.T) { + testJsonFromStruct(t, A2{}, `{ + "swagger.A2": { + "id": "swagger.A2", + "required": [ + "B", + "secure" + ], + "properties": { + "B": { + "type": "integer", + "format": "int32" + }, + "comment": { + "type": "string" + }, + "secure": { + "type": "boolean" + } + } + } + }`) +} + +type A3 struct { + B D +} + +type D struct { + Id int +} + +// clear && go test -v -test.run TestStructA3 ...swagger +func TestStructA3(t *testing.T) { + testJsonFromStruct(t, A3{}, `{ + "swagger.A3": { + "id": "swagger.A3", + "required": [ + "B" + ], + "properties": { + "B": { + "$ref": "swagger.D" + } + } + }, + "swagger.D": { + "id": "swagger.D", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "integer", + "format": "int32" + } + } + } + }`) +} + +type A4 struct { + D "json:,inline" +} + +// clear && go test -v -test.run TestStructA4 ...swagger +func TestEmbeddedStructA4(t *testing.T) { + testJsonFromStruct(t, A4{}, `{ + "swagger.A4": { + "id": "swagger.A4", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "integer", + "format": "int32" + } + } + } + }`) +} + +type A5 struct { + D `json:"d"` +} + +// clear && go test -v -test.run TestStructA5 ...swagger +func TestEmbeddedStructA5(t *testing.T) { + testJsonFromStruct(t, A5{}, `{ + "swagger.A5": { + "id": "swagger.A5", + "required": [ + "d" + ], + "properties": { + "d": { + "$ref": "swagger.D" + } + } + }, + "swagger.D": { + "id": "swagger.D", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "integer", + "format": "int32" + } + } + } + }`) +} + +type D2 
struct { + id int + D []D +} + +type A6 struct { + D2 "json:,inline" +} + +// clear && go test -v -test.run TestStructA4 ...swagger +func TestEmbeddedStructA6(t *testing.T) { + testJsonFromStruct(t, A6{}, `{ + "swagger.A6": { + "id": "swagger.A6", + "required": [ + "id", + "D" + ], + "properties": { + "D": { + "type": "array", + "items": { + "$ref": "swagger.D" + } + }, + "id": { + "type": "integer", + "format": "int32" + } + } + }, + "swagger.D": { + "id": "swagger.D", + "required": [ + "Id" + ], + "properties": { + "Id": { + "type": "integer", + "format": "int32" + } + } + } + }`) +} + +type ObjectId []byte + +type Region struct { + Id ObjectId `bson:"_id" json:"id"` + Name string `bson:"name" json:"name"` + Type string `bson:"type" json:"type"` +} + +// clear && go test -v -test.run TestRegion_Issue113 ...swagger +func TestRegion_Issue113(t *testing.T) { + testJsonFromStruct(t, []Region{}, `{ + "||swagger.Region": { + "id": "||swagger.Region", + "properties": {} + }, + "swagger.Region": { + "id": "swagger.Region", + "required": [ + "id", + "name", + "type" + ], + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "type": { + "type": "string" + } + } + } + }`) +} + +// clear && go test -v -test.run TestIssue158 ...swagger +func TestIssue158(t *testing.T) { + type Address struct { + Country string `json:"country,omitempty"` + } + + type Customer struct { + Name string `json:"name"` + Address Address `json:"address"` + } + expected := `{ + "swagger.Address": { + "id": "swagger.Address", + "properties": { + "country": { + "type": "string" + } + } + }, + "swagger.Customer": { + "id": "swagger.Customer", + "required": [ + "name", + "address" + ], + "properties": { + "address": { + "$ref": "swagger.Address" + }, + "name": { + "type": "string" + } + } + } + }` + testJsonFromStruct(t, Customer{}, expected) +} + +func TestPointers(t *testing.T) { + type Vote struct { + What YesNo + } + testJsonFromStruct(t, &Vote{}, `{ + "swagger.Vote": { + "id": "swagger.Vote", + "required": [ + "What" + ], + "properties": { + "What": { + "type": "string" + } + } + } + }`) +} + +func TestSlices(t *testing.T) { + type Address struct { + Country string `json:"country,omitempty"` + } + expected := `{ + "swagger.Address": { + "id": "swagger.Address", + "properties": { + "country": { + "type": "string" + } + } + }, + "swagger.Customer": { + "id": "swagger.Customer", + "required": [ + "name", + "addresses" + ], + "properties": { + "addresses": { + "type": "array", + "items": { + "$ref": "swagger.Address" + } + }, + "name": { + "type": "string" + } + } + } + }` + // both slices (with pointer value and with type value) should have equal swagger representation + { + type Customer struct { + Name string `json:"name"` + Addresses []Address `json:"addresses"` + } + testJsonFromStruct(t, Customer{}, expected) + } + { + type Customer struct { + Name string `json:"name"` + Addresses []*Address `json:"addresses"` + } + testJsonFromStruct(t, Customer{}, expected) + } + +} + +type Name struct { + Value string +} + +func (n Name) PostBuildModel(m *Model) *Model { + m.Description = "titles must be upcase" + return m +} + +type TOC struct { + Titles []Name +} + +type Discography struct { + Title Name + TOC +} + +// clear && go test -v -test.run TestEmbeddedStructPull204 ...swagger +func TestEmbeddedStructPull204(t *testing.T) { + b := Discography{} + testJsonFromStruct(t, b, ` +{ + "swagger.Discography": { + "id": "swagger.Discography", + "required": [ + "Title", + "Titles" + ], + "properties": { 
+ "Title": { + "$ref": "swagger.Name" + }, + "Titles": { + "type": "array", + "items": { + "$ref": "swagger.Name" + } + } + } + }, + "swagger.Name": { + "id": "swagger.Name", + "required": [ + "Value" + ], + "properties": { + "Value": { + "type": "string" + } + } + } + } +`) +} + +type AddressWithMethod struct { + Country string `json:"country,omitempty"` + PostCode int `json:"postcode,omitempty"` +} + +func (AddressWithMethod) SwaggerDoc() map[string]string { + return map[string]string{ + "": "Address doc", + "country": "Country doc", + "postcode": "PostCode doc", + } +} + +func TestDocInMethodSwaggerDoc(t *testing.T) { + expected := `{ + "swagger.AddressWithMethod": { + "id": "swagger.AddressWithMethod", + "description": "Address doc", + "properties": { + "country": { + "type": "string", + "description": "Country doc" + }, + "postcode": { + "type": "integer", + "format": "int32", + "description": "PostCode doc" + } + } + } + }` + testJsonFromStruct(t, AddressWithMethod{}, expected) +} + +type RefDesc struct { + f1 *int64 `description:"desc"` +} + +func TestPtrDescription(t *testing.T) { + b := RefDesc{} + expected := `{ + "swagger.RefDesc": { + "id": "swagger.RefDesc", + "required": [ + "f1" + ], + "properties": { + "f1": { + "type": "integer", + "format": "int64", + "description": "desc" + } + } + } + }` + testJsonFromStruct(t, b, expected) +} + +type A struct { + B `json:",inline"` + C1 `json:"metadata,omitempty"` +} + +type B struct { + SB string +} + +type C1 struct { + SC string +} + +func (A) SwaggerDoc() map[string]string { + return map[string]string{ + "": "A struct", + "B": "B field", // We should not get anything from this + "metadata": "C1 field", + } +} + +func (B) SwaggerDoc() map[string]string { + return map[string]string{ + "": "B struct", + "SB": "SB field", + } +} + +func (C1) SwaggerDoc() map[string]string { + return map[string]string{ + "": "C1 struct", + "SC": "SC field", + } +} + +func TestNestedStructDescription(t *testing.T) { + expected := ` +{ + "swagger.A": { + "id": "swagger.A", + "description": "A struct", + "required": [ + "SB" + ], + "properties": { + "SB": { + "type": "string", + "description": "SB field" + }, + "metadata": { + "$ref": "swagger.C1", + "description": "C1 field" + } + } + }, + "swagger.C1": { + "id": "swagger.C1", + "description": "C1 struct", + "required": [ + "SC" + ], + "properties": { + "SC": { + "type": "string", + "description": "SC field" + } + } + } + } +` + testJsonFromStruct(t, A{}, expected) +} + +// This tests a primitive with type overrides in the struct tags +type FakeInt int +type E struct { + Id FakeInt `type:"integer"` + IP net.IP `type:"string"` +} + +func TestOverridenTypeTagE1(t *testing.T) { + expected := ` +{ + "swagger.E": { + "id": "swagger.E", + "required": [ + "Id", + "IP" + ], + "properties": { + "Id": { + "type": "integer" + }, + "IP": { + "type": "string" + } + } + } + } +` + testJsonFromStruct(t, E{}, expected) +} + +type XmlNamed struct { + XMLName xml.Name `xml:"user"` + Id string `json:"id" xml:"id"` + Name string `json:"name" xml:"name"` +} + +func TestXmlNameStructs(t *testing.T) { + expected := ` +{ + "swagger.XmlNamed": { + "id": "swagger.XmlNamed", + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + } + } + } + } +` + testJsonFromStruct(t, XmlNamed{}, expected) +} + +func TestNameCustomization(t *testing.T) { + expected := ` +{ + "swagger.A": { + "id": "swagger.A", + "description": "A struct", + "required": [ + "SB" + ], + 
"properties": { + "SB": { + "type": "string", + "description": "SB field" + }, + "metadata": { + "$ref": "new.swagger.SpecialC1", + "description": "C1 field" + } + } + }, + "new.swagger.SpecialC1": { + "id": "new.swagger.SpecialC1", + "description": "C1 struct", + "required": [ + "SC" + ], + "properties": { + "SC": { + "type": "string", + "description": "SC field" + } + } + } + }` + + testJsonFromStructWithConfig(t, A{}, expected, &Config{ + ModelTypeNameHandler: func(t reflect.Type) (string, bool) { + if t == reflect.TypeOf(C1{}) { + return "new.swagger.SpecialC1", true + } + return "", false + }, + }) +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_list.go b/vendor/github.com/emicklei/go-restful-swagger12/model_list.go new file mode 100644 index 000000000..9bb6cb678 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_list.go @@ -0,0 +1,86 @@ +package swagger + +// Copyright 2015 Ernest Micklei. All rights reserved. +// Use of this source code is governed by a license +// that can be found in the LICENSE file. + +import ( + "bytes" + "encoding/json" +) + +// NamedModel associates a name with a Model (not using its Id) +type NamedModel struct { + Name string + Model Model +} + +// ModelList encapsulates a list of NamedModel (association) +type ModelList struct { + List []NamedModel +} + +// Put adds or replaces a Model by its name +func (l *ModelList) Put(name string, model Model) { + for i, each := range l.List { + if each.Name == name { + // replace + l.List[i] = NamedModel{name, model} + return + } + } + // add + l.List = append(l.List, NamedModel{name, model}) +} + +// At returns a Model by its name, ok is false if absent +func (l *ModelList) At(name string) (m Model, ok bool) { + for _, each := range l.List { + if each.Name == name { + return each.Model, true + } + } + return m, false +} + +// Do enumerates all the models, each with its assigned name +func (l *ModelList) Do(block func(name string, value Model)) { + for _, each := range l.List { + block(each.Name, each.Model) + } +} + +// MarshalJSON writes the ModelList as if it was a map[string]Model +func (l ModelList) MarshalJSON() ([]byte, error) { + var buf bytes.Buffer + encoder := json.NewEncoder(&buf) + buf.WriteString("{\n") + for i, each := range l.List { + buf.WriteString("\"") + buf.WriteString(each.Name) + buf.WriteString("\": ") + encoder.Encode(each.Model) + if i < len(l.List)-1 { + buf.WriteString(",\n") + } + } + buf.WriteString("}") + return buf.Bytes(), nil +} + +// UnmarshalJSON reads back a ModelList. This is an expensive operation. 
+func (l *ModelList) UnmarshalJSON(data []byte) error { + raw := map[string]interface{}{} + json.NewDecoder(bytes.NewReader(data)).Decode(&raw) + for k, v := range raw { + // produces JSON bytes for each value + data, err := json.Marshal(v) + if err != nil { + return err + } + var m Model + json.NewDecoder(bytes.NewReader(data)).Decode(&m) + l.Put(k, m) + } + return nil +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_list_test.go b/vendor/github.com/emicklei/go-restful-swagger12/model_list_test.go new file mode 100644 index 000000000..9a9ab919b --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_list_test.go @@ -0,0 +1,48 @@ +package swagger + +import ( + "encoding/json" + "testing" +) + +func TestModelList(t *testing.T) { + m := Model{} + m.Id = "m" + l := ModelList{} + l.Put("m", m) + k, ok := l.At("m") + if !ok { + t.Error("want model back") + } + if got, want := k.Id, "m"; got != want { + t.Errorf("got %v want %v", got, want) + } +} + +func TestModelList_Marshal(t *testing.T) { + l := ModelList{} + m := Model{Id: "myid"} + l.Put("myid", m) + data, err := json.Marshal(l) + if err != nil { + t.Error(err) + } + if got, want := string(data), `{"myid":{"id":"myid","properties":{}}}`; got != want { + t.Errorf("got %v want %v", got, want) + } +} + +func TestModelList_Unmarshal(t *testing.T) { + data := `{"myid":{"id":"myid","properties":{}}}` + l := ModelList{} + if err := json.Unmarshal([]byte(data), &l); err != nil { + t.Error(err) + } + m, ok := l.At("myid") + if !ok { + t.Error("expected myid") + } + if got, want := m.Id, "myid"; got != want { + t.Errorf("got %v want %v", got, want) + } +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_property_ext.go b/vendor/github.com/emicklei/go-restful-swagger12/model_property_ext.go new file mode 100644 index 000000000..a433b6b70 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_property_ext.go @@ -0,0 +1,81 @@ +package swagger + +import ( + "reflect" + "strings" +) + +func (prop *ModelProperty) setDescription(field reflect.StructField) { + if tag := field.Tag.Get("description"); tag != "" { + prop.Description = tag + } +} + +func (prop *ModelProperty) setDefaultValue(field reflect.StructField) { + if tag := field.Tag.Get("default"); tag != "" { + prop.DefaultValue = Special(tag) + } +} + +func (prop *ModelProperty) setEnumValues(field reflect.StructField) { + // We use | to separate the enum values. This value is chosen + // since its unlikely to be useful in actual enumeration values. + if tag := field.Tag.Get("enum"); tag != "" { + prop.Enum = strings.Split(tag, "|") + } +} + +func (prop *ModelProperty) setMaximum(field reflect.StructField) { + if tag := field.Tag.Get("maximum"); tag != "" { + prop.Maximum = tag + } +} + +func (prop *ModelProperty) setType(field reflect.StructField) { + if tag := field.Tag.Get("type"); tag != "" { + // Check if the first two characters of the type tag are + // intended to emulate slice/array behaviour. 
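+		// For example, a tag of `type:"[]string"` produces an "array"
+		// property whose items are of type "string".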
+ // + // If type is intended to be a slice/array then add the + // overriden type to the array item instead of the main property + if len(tag) > 2 && tag[0:2] == "[]" { + pType := "array" + prop.Type = &pType + prop.Items = new(Item) + + iType := tag[2:] + prop.Items.Type = &iType + return + } + + prop.Type = &tag + } +} + +func (prop *ModelProperty) setMinimum(field reflect.StructField) { + if tag := field.Tag.Get("minimum"); tag != "" { + prop.Minimum = tag + } +} + +func (prop *ModelProperty) setUniqueItems(field reflect.StructField) { + tag := field.Tag.Get("unique") + switch tag { + case "true": + v := true + prop.UniqueItems = &v + case "false": + v := false + prop.UniqueItems = &v + } +} + +func (prop *ModelProperty) setPropertyMetadata(field reflect.StructField) { + prop.setDescription(field) + prop.setEnumValues(field) + prop.setMinimum(field) + prop.setMaximum(field) + prop.setUniqueItems(field) + prop.setDefaultValue(field) + prop.setType(field) +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_property_ext_test.go b/vendor/github.com/emicklei/go-restful-swagger12/model_property_ext_test.go new file mode 100644 index 000000000..1123ed992 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_property_ext_test.go @@ -0,0 +1,70 @@ +package swagger + +import ( + "net" + "testing" +) + +// clear && go test -v -test.run TestThatExtraTagsAreReadIntoModel ...swagger +func TestThatExtraTagsAreReadIntoModel(t *testing.T) { + type fakeint int + type fakearray string + type Anything struct { + Name string `description:"name" modelDescription:"a test"` + Size int `minimum:"0" maximum:"10"` + Stati string `enum:"off|on" default:"on" modelDescription:"more description"` + ID string `unique:"true"` + FakeInt fakeint `type:"integer"` + FakeArray fakearray `type:"[]string"` + IP net.IP `type:"string"` + Password string + } + m := modelsFromStruct(Anything{}) + props, _ := m.At("swagger.Anything") + p1, _ := props.Properties.At("Name") + if got, want := p1.Description, "name"; got != want { + t.Errorf("got %v want %v", got, want) + } + p2, _ := props.Properties.At("Size") + if got, want := p2.Minimum, "0"; got != want { + t.Errorf("got %v want %v", got, want) + } + if got, want := p2.Maximum, "10"; got != want { + t.Errorf("got %v want %v", got, want) + } + p3, _ := props.Properties.At("Stati") + if got, want := p3.Enum[0], "off"; got != want { + t.Errorf("got %v want %v", got, want) + } + if got, want := p3.Enum[1], "on"; got != want { + t.Errorf("got %v want %v", got, want) + } + p4, _ := props.Properties.At("ID") + if got, want := *p4.UniqueItems, true; got != want { + t.Errorf("got %v want %v", got, want) + } + p5, _ := props.Properties.At("Password") + if got, want := *p5.Type, "string"; got != want { + t.Errorf("got %v want %v", got, want) + } + p6, _ := props.Properties.At("FakeInt") + if got, want := *p6.Type, "integer"; got != want { + t.Errorf("got %v want %v", got, want) + } + p7, _ := props.Properties.At("FakeArray") + if got, want := *p7.Type, "array"; got != want { + t.Errorf("got %v want %v", got, want) + } + p7p, _ := props.Properties.At("FakeArray") + if got, want := *p7p.Items.Type, "string"; got != want { + t.Errorf("got %v want %v", got, want) + } + p8, _ := props.Properties.At("IP") + if got, want := *p8.Type, "string"; got != want { + t.Errorf("got %v want %v", got, want) + } + + if got, want := props.Description, "a test\nmore description"; got != want { + t.Errorf("got %v want %v", got, want) + } +} diff --git 
a/vendor/github.com/emicklei/go-restful-swagger12/model_property_list.go b/vendor/github.com/emicklei/go-restful-swagger12/model_property_list.go new file mode 100644 index 000000000..3babb1944 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_property_list.go @@ -0,0 +1,87 @@ +package swagger + +// Copyright 2015 Ernest Micklei. All rights reserved. +// Use of this source code is governed by a license +// that can be found in the LICENSE file. + +import ( + "bytes" + "encoding/json" +) + +// NamedModelProperty associates a name to a ModelProperty +type NamedModelProperty struct { + Name string + Property ModelProperty +} + +// ModelPropertyList encapsulates a list of NamedModelProperty (association) +type ModelPropertyList struct { + List []NamedModelProperty +} + +// At returns the ModelPropety by its name unless absent, then ok is false +func (l *ModelPropertyList) At(name string) (p ModelProperty, ok bool) { + for _, each := range l.List { + if each.Name == name { + return each.Property, true + } + } + return p, false +} + +// Put adds or replaces a ModelProperty with this name +func (l *ModelPropertyList) Put(name string, prop ModelProperty) { + // maybe replace existing + for i, each := range l.List { + if each.Name == name { + // replace + l.List[i] = NamedModelProperty{Name: name, Property: prop} + return + } + } + // add + l.List = append(l.List, NamedModelProperty{Name: name, Property: prop}) +} + +// Do enumerates all the properties, each with its assigned name +func (l *ModelPropertyList) Do(block func(name string, value ModelProperty)) { + for _, each := range l.List { + block(each.Name, each.Property) + } +} + +// MarshalJSON writes the ModelPropertyList as if it was a map[string]ModelProperty +func (l ModelPropertyList) MarshalJSON() ([]byte, error) { + var buf bytes.Buffer + encoder := json.NewEncoder(&buf) + buf.WriteString("{\n") + for i, each := range l.List { + buf.WriteString("\"") + buf.WriteString(each.Name) + buf.WriteString("\": ") + encoder.Encode(each.Property) + if i < len(l.List)-1 { + buf.WriteString(",\n") + } + } + buf.WriteString("}") + return buf.Bytes(), nil +} + +// UnmarshalJSON reads back a ModelPropertyList. This is an expensive operation. 
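+// As with ModelList.UnmarshalJSON, decode errors are swallowed and the
+// resulting property order is not guaranteed to match that of the input JSON.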
+func (l *ModelPropertyList) UnmarshalJSON(data []byte) error { + raw := map[string]interface{}{} + json.NewDecoder(bytes.NewReader(data)).Decode(&raw) + for k, v := range raw { + // produces JSON bytes for each value + data, err := json.Marshal(v) + if err != nil { + return err + } + var m ModelProperty + json.NewDecoder(bytes.NewReader(data)).Decode(&m) + l.Put(k, m) + } + return nil +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/model_property_list_test.go b/vendor/github.com/emicklei/go-restful-swagger12/model_property_list_test.go new file mode 100644 index 000000000..2833ad8fd --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/model_property_list_test.go @@ -0,0 +1,47 @@ +package swagger + +import ( + "encoding/json" + "testing" +) + +func TestModelPropertyList(t *testing.T) { + l := ModelPropertyList{} + p := ModelProperty{Description: "d"} + l.Put("p", p) + q, ok := l.At("p") + if !ok { + t.Error("expected p") + } + if got, want := q.Description, "d"; got != want { + t.Errorf("got %v want %v", got, want) + } +} + +func TestModelPropertyList_Marshal(t *testing.T) { + l := ModelPropertyList{} + p := ModelProperty{Description: "d"} + l.Put("p", p) + data, err := json.Marshal(l) + if err != nil { + t.Error(err) + } + if got, want := string(data), `{"p":{"description":"d"}}`; got != want { + t.Errorf("got %v want %v", got, want) + } +} + +func TestModelPropertyList_Unmarshal(t *testing.T) { + data := `{"p":{"description":"d"}}` + l := ModelPropertyList{} + if err := json.Unmarshal([]byte(data), &l); err != nil { + t.Error(err) + } + m, ok := l.At("p") + if !ok { + t.Error("expected p") + } + if got, want := m.Description, "d"; got != want { + t.Errorf("got %v want %v", got, want) + } +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map.go b/vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map.go new file mode 100644 index 000000000..b33ccfbeb --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map.go @@ -0,0 +1,36 @@ +package swagger + +// Copyright 2015 Ernest Micklei. All rights reserved. +// Use of this source code is governed by a license +// that can be found in the LICENSE file. 
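+
+// orderedRouteMap groups routes by path while remembering the order in which
+// paths were first added; Do replays the routes in that same insertion order.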
+ +import "github.com/emicklei/go-restful" + +type orderedRouteMap struct { + elements map[string][]restful.Route + keys []string +} + +func newOrderedRouteMap() *orderedRouteMap { + return &orderedRouteMap{ + elements: map[string][]restful.Route{}, + keys: []string{}, + } +} + +func (o *orderedRouteMap) Add(key string, route restful.Route) { + routes, ok := o.elements[key] + if ok { + routes = append(routes, route) + o.elements[key] = routes + return + } + o.elements[key] = []restful.Route{route} + o.keys = append(o.keys, key) +} + +func (o *orderedRouteMap) Do(block func(key string, routes []restful.Route)) { + for _, k := range o.keys { + block(k, o.elements[k]) + } +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map_test.go b/vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map_test.go new file mode 100644 index 000000000..964e7da05 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map_test.go @@ -0,0 +1,29 @@ +package swagger + +import ( + "testing" + + "github.com/emicklei/go-restful" +) + +// go test -v -test.run TestOrderedRouteMap ...swagger +func TestOrderedRouteMap(t *testing.T) { + m := newOrderedRouteMap() + r1 := restful.Route{Path: "/r1"} + r2 := restful.Route{Path: "/r2"} + m.Add("a", r1) + m.Add("b", r2) + m.Add("b", r1) + m.Add("d", r2) + m.Add("c", r2) + order := "" + m.Do(func(k string, routes []restful.Route) { + order += k + if len(routes) == 0 { + t.Fail() + } + }) + if order != "abdc" { + t.Fail() + } +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/postbuild_model_test.go b/vendor/github.com/emicklei/go-restful-swagger12/postbuild_model_test.go new file mode 100644 index 000000000..3e20d2f5b --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/postbuild_model_test.go @@ -0,0 +1,42 @@ +package swagger + +import "testing" + +type Boat struct { + Length int `json:"-"` // on default, this makes the fields not required + Weight int `json:"-"` +} + +// PostBuildModel is from swagger.ModelBuildable +func (b Boat) PostBuildModel(m *Model) *Model { + // override required + m.Required = []string{"Length", "Weight"} + + // add model property (just to test is can be added; is this a real usecase?) 
+ extraType := "string" + m.Properties.Put("extra", ModelProperty{ + Description: "extra description", + DataTypeFields: DataTypeFields{ + Type: &extraType, + }, + }) + return m +} + +func TestCustomPostModelBuilde(t *testing.T) { + testJsonFromStruct(t, Boat{}, `{ + "swagger.Boat": { + "id": "swagger.Boat", + "required": [ + "Length", + "Weight" + ], + "properties": { + "extra": { + "type": "string", + "description": "extra description" + } + } + } +}`) +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/swagger.go b/vendor/github.com/emicklei/go-restful-swagger12/swagger.go new file mode 100644 index 000000000..9c40833e7 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/swagger.go @@ -0,0 +1,185 @@ +// Package swagger implements the structures of the Swagger +// https://github.com/wordnik/swagger-spec/blob/master/versions/1.2.md +package swagger + +const swaggerVersion = "1.2" + +// 4.3.3 Data Type Fields +type DataTypeFields struct { + Type *string `json:"type,omitempty"` // if Ref not used + Ref *string `json:"$ref,omitempty"` // if Type not used + Format string `json:"format,omitempty"` + DefaultValue Special `json:"defaultValue,omitempty"` + Enum []string `json:"enum,omitempty"` + Minimum string `json:"minimum,omitempty"` + Maximum string `json:"maximum,omitempty"` + Items *Item `json:"items,omitempty"` + UniqueItems *bool `json:"uniqueItems,omitempty"` +} + +type Special string + +// 4.3.4 Items Object +type Item struct { + Type *string `json:"type,omitempty"` + Ref *string `json:"$ref,omitempty"` + Format string `json:"format,omitempty"` +} + +// 5.1 Resource Listing +type ResourceListing struct { + SwaggerVersion string `json:"swaggerVersion"` // e.g 1.2 + Apis []Resource `json:"apis"` + ApiVersion string `json:"apiVersion"` + Info Info `json:"info"` + Authorizations []Authorization `json:"authorizations,omitempty"` +} + +// 5.1.2 Resource Object +type Resource struct { + Path string `json:"path"` // relative or absolute, must start with / + Description string `json:"description"` +} + +// 5.1.3 Info Object +type Info struct { + Title string `json:"title"` + Description string `json:"description"` + TermsOfServiceUrl string `json:"termsOfServiceUrl,omitempty"` + Contact string `json:"contact,omitempty"` + License string `json:"license,omitempty"` + LicenseUrl string `json:"licenseUrl,omitempty"` +} + +// 5.1.5 +type Authorization struct { + Type string `json:"type"` + PassAs string `json:"passAs"` + Keyname string `json:"keyname"` + Scopes []Scope `json:"scopes"` + GrantTypes []GrantType `json:"grandTypes"` +} + +// 5.1.6, 5.2.11 +type Scope struct { + // Required. The name of the scope. + Scope string `json:"scope"` + // Recommended. A short description of the scope. + Description string `json:"description"` +} + +// 5.1.7 +type GrantType struct { + Implicit Implicit `json:"implicit"` + AuthorizationCode AuthorizationCode `json:"authorization_code"` +} + +// 5.1.8 Implicit Object +type Implicit struct { + // Required. The login endpoint definition. + loginEndpoint LoginEndpoint `json:"loginEndpoint"` + // An optional alternative name to standard "access_token" OAuth2 parameter. + TokenName string `json:"tokenName"` +} + +// 5.1.9 Authorization Code Object +type AuthorizationCode struct { + TokenRequestEndpoint TokenRequestEndpoint `json:"tokenRequestEndpoint"` + TokenEndpoint TokenEndpoint `json:"tokenEndpoint"` +} + +// 5.1.10 Login Endpoint Object +type LoginEndpoint struct { + // Required. 
The URL of the authorization endpoint for the implicit grant flow. The value SHOULD be in a URL format. + Url string `json:"url"` +} + +// 5.1.11 Token Request Endpoint Object +type TokenRequestEndpoint struct { + // Required. The URL of the authorization endpoint for the authentication code grant flow. The value SHOULD be in a URL format. + Url string `json:"url"` + // An optional alternative name to standard "client_id" OAuth2 parameter. + ClientIdName string `json:"clientIdName"` + // An optional alternative name to the standard "client_secret" OAuth2 parameter. + ClientSecretName string `json:"clientSecretName"` +} + +// 5.1.12 Token Endpoint Object +type TokenEndpoint struct { + // Required. The URL of the token endpoint for the authentication code grant flow. The value SHOULD be in a URL format. + Url string `json:"url"` + // An optional alternative name to standard "access_token" OAuth2 parameter. + TokenName string `json:"tokenName"` +} + +// 5.2 API Declaration +type ApiDeclaration struct { + SwaggerVersion string `json:"swaggerVersion"` + ApiVersion string `json:"apiVersion"` + BasePath string `json:"basePath"` + ResourcePath string `json:"resourcePath"` // must start with / + Info Info `json:"info"` + Apis []Api `json:"apis,omitempty"` + Models ModelList `json:"models,omitempty"` + Produces []string `json:"produces,omitempty"` + Consumes []string `json:"consumes,omitempty"` + Authorizations []Authorization `json:"authorizations,omitempty"` +} + +// 5.2.2 API Object +type Api struct { + Path string `json:"path"` // relative or absolute, must start with / + Description string `json:"description"` + Operations []Operation `json:"operations,omitempty"` +} + +// 5.2.3 Operation Object +type Operation struct { + DataTypeFields + Method string `json:"method"` + Summary string `json:"summary,omitempty"` + Notes string `json:"notes,omitempty"` + Nickname string `json:"nickname"` + Authorizations []Authorization `json:"authorizations,omitempty"` + Parameters []Parameter `json:"parameters"` + ResponseMessages []ResponseMessage `json:"responseMessages,omitempty"` // optional + Produces []string `json:"produces,omitempty"` + Consumes []string `json:"consumes,omitempty"` + Deprecated string `json:"deprecated,omitempty"` +} + +// 5.2.4 Parameter Object +type Parameter struct { + DataTypeFields + ParamType string `json:"paramType"` // path,query,body,header,form + Name string `json:"name"` + Description string `json:"description"` + Required bool `json:"required"` + AllowMultiple bool `json:"allowMultiple"` +} + +// 5.2.5 Response Message Object +type ResponseMessage struct { + Code int `json:"code"` + Message string `json:"message"` + ResponseModel string `json:"responseModel,omitempty"` +} + +// 5.2.6, 5.2.7 Models Object +type Model struct { + Id string `json:"id"` + Description string `json:"description,omitempty"` + Required []string `json:"required,omitempty"` + Properties ModelPropertyList `json:"properties"` + SubTypes []string `json:"subTypes,omitempty"` + Discriminator string `json:"discriminator,omitempty"` +} + +// 5.2.8 Properties Object +type ModelProperty struct { + DataTypeFields + Description string `json:"description,omitempty"` +} + +// 5.2.10 +type Authorizations map[string]Authorization diff --git a/vendor/github.com/emicklei/go-restful-swagger12/swagger_builder.go b/vendor/github.com/emicklei/go-restful-swagger12/swagger_builder.go new file mode 100644 index 000000000..05a3c7e76 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/swagger_builder.go @@ -0,0 
+1,21 @@ +package swagger + +type SwaggerBuilder struct { + SwaggerService +} + +func NewSwaggerBuilder(config Config) *SwaggerBuilder { + return &SwaggerBuilder{*newSwaggerService(config)} +} + +func (sb SwaggerBuilder) ProduceListing() ResourceListing { + return sb.SwaggerService.produceListing() +} + +func (sb SwaggerBuilder) ProduceAllDeclarations() map[string]ApiDeclaration { + return sb.SwaggerService.produceAllDeclarations() +} + +func (sb SwaggerBuilder) ProduceDeclarations(route string) (*ApiDeclaration, bool) { + return sb.SwaggerService.produceDeclarations(route) +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/swagger_test.go b/vendor/github.com/emicklei/go-restful-swagger12/swagger_test.go new file mode 100644 index 000000000..3db904afc --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/swagger_test.go @@ -0,0 +1,318 @@ +package swagger + +import ( + "encoding/json" + "testing" + + "github.com/emicklei/go-restful" + "github.com/emicklei/go-restful-swagger12/test_package" +) + +func TestInfoStruct_Issue231(t *testing.T) { + config := Config{ + Info: Info{ + Title: "Title", + Description: "Description", + TermsOfServiceUrl: "http://example.com", + Contact: "example@example.com", + License: "License", + LicenseUrl: "http://example.com/license.txt", + }, + } + sws := newSwaggerService(config) + str, err := json.MarshalIndent(sws.produceListing(), "", " ") + if err != nil { + t.Fatal(err) + } + compareJson(t, string(str), ` + { + "apiVersion": "", + "swaggerVersion": "1.2", + "apis": null, + "info": { + "title": "Title", + "description": "Description", + "termsOfServiceUrl": "http://example.com", + "contact": "example@example.com", + "license": "License", + "licenseUrl": "http://example.com/license.txt" + } + } + `) +} + +// go test -v -test.run TestThatMultiplePathsOnRootAreHandled ...swagger +func TestThatMultiplePathsOnRootAreHandled(t *testing.T) { + ws1 := new(restful.WebService) + ws1.Route(ws1.GET("/_ping").To(dummy)) + ws1.Route(ws1.GET("/version").To(dummy)) + + cfg := Config{ + WebServicesUrl: "http://here.com", + ApiPath: "/apipath", + WebServices: []*restful.WebService{ws1}, + } + sws := newSwaggerService(cfg) + decl := sws.composeDeclaration(ws1, "/") + if got, want := len(decl.Apis), 2; got != want { + t.Errorf("got %v want %v", got, want) + } +} + +func TestWriteSamples(t *testing.T) { + ws1 := new(restful.WebService) + ws1.Route(ws1.GET("/object").To(dummy).Writes(test_package.TestStruct{})) + ws1.Route(ws1.GET("/array").To(dummy).Writes([]test_package.TestStruct{})) + ws1.Route(ws1.GET("/object_and_array").To(dummy).Writes(struct{ Abc test_package.TestStruct }{})) + + cfg := Config{ + WebServicesUrl: "http://here.com", + ApiPath: "/apipath", + WebServices: []*restful.WebService{ws1}, + } + sws := newSwaggerService(cfg) + + decl := sws.composeDeclaration(ws1, "/") + + str, err := json.MarshalIndent(decl.Apis, "", " ") + if err != nil { + t.Fatal(err) + } + + compareJson(t, string(str), ` + [ + { + "path": "/object", + "description": "", + "operations": [ + { + "type": "test_package.TestStruct", + "method": "GET", + "nickname": "dummy", + "parameters": [] + } + ] + }, + { + "path": "/array", + "description": "", + "operations": [ + { + "type": "array", + "items": { + "$ref": "test_package.TestStruct" + }, + "method": "GET", + "nickname": "dummy", + "parameters": [] + } + ] + }, + { + "path": "/object_and_array", + "description": "", + "operations": [ + { + "type": "struct { Abc test_package.TestStruct }", + "method": "GET", + 
"nickname": "dummy", + "parameters": [] + } + ] + } + ]`) + + str, err = json.MarshalIndent(decl.Models, "", " ") + if err != nil { + t.Fatal(err) + } + compareJson(t, string(str), ` + { + "test_package.TestStruct": { + "id": "test_package.TestStruct", + "required": [ + "TestField" + ], + "properties": { + "TestField": { + "type": "string" + } + } + }, + "||test_package.TestStruct": { + "id": "||test_package.TestStruct", + "properties": {} + }, + "struct { Abc test_package.TestStruct }": { + "id": "struct { Abc test_package.TestStruct }", + "required": [ + "Abc" + ], + "properties": { + "Abc": { + "$ref": "test_package.TestStruct" + } + } + } + }`) +} + +func TestRoutesWithCommonPart(t *testing.T) { + ws1 := new(restful.WebService) + ws1.Path("/") + ws1.Route(ws1.GET("/foobar").To(dummy).Writes(test_package.TestStruct{})) + ws1.Route(ws1.HEAD("/foobar").To(dummy).Writes(test_package.TestStruct{})) + ws1.Route(ws1.GET("/foo").To(dummy).Writes([]test_package.TestStruct{})) + ws1.Route(ws1.HEAD("/foo").To(dummy).Writes(test_package.TestStruct{})) + + cfg := Config{ + WebServicesUrl: "http://here.com", + ApiPath: "/apipath", + WebServices: []*restful.WebService{ws1}, + } + sws := newSwaggerService(cfg) + + decl := sws.composeDeclaration(ws1, "/foo") + + str, err := json.MarshalIndent(decl.Apis, "", " ") + if err != nil { + t.Fatal(err) + } + + compareJson(t, string(str), `[ + { + "path": "/foo", + "description": "", + "operations": [ + { + "type": "array", + "items": { + "$ref": "test_package.TestStruct" + }, + "method": "GET", + "nickname": "dummy", + "parameters": [] + }, + { + "type": "test_package.TestStruct", + "method": "HEAD", + "nickname": "dummy", + "parameters": [] + } + ] + } + ]`) +} + +// go test -v -test.run TestServiceToApi ...swagger +func TestServiceToApi(t *testing.T) { + ws := new(restful.WebService) + ws.Path("/tests") + ws.Consumes(restful.MIME_JSON) + ws.Produces(restful.MIME_XML) + ws.Route(ws.GET("/a").To(dummy).Writes(sample{})) + ws.Route(ws.PUT("/b").To(dummy).Writes(sample{})) + ws.Route(ws.POST("/c").To(dummy).Writes(sample{})) + ws.Route(ws.DELETE("/d").To(dummy).Writes(sample{})) + + ws.Route(ws.GET("/d").To(dummy).Writes(sample{})) + ws.Route(ws.PUT("/c").To(dummy).Writes(sample{})) + ws.Route(ws.POST("/b").To(dummy).Writes(sample{})) + ws.Route(ws.DELETE("/a").To(dummy).Writes(sample{})) + ws.ApiVersion("1.2.3") + cfg := Config{ + WebServicesUrl: "http://here.com", + ApiPath: "/apipath", + WebServices: []*restful.WebService{ws}, + PostBuildHandler: func(in *ApiDeclarationList) {}, + } + sws := newSwaggerService(cfg) + decl := sws.composeDeclaration(ws, "/tests") + // checks + if decl.ApiVersion != "1.2.3" { + t.Errorf("got %v want %v", decl.ApiVersion, "1.2.3") + } + if decl.BasePath != "http://here.com" { + t.Errorf("got %v want %v", decl.BasePath, "http://here.com") + } + if len(decl.Apis) != 4 { + t.Errorf("got %v want %v", len(decl.Apis), 4) + } + pathOrder := "" + for _, each := range decl.Apis { + pathOrder += each.Path + for _, other := range each.Operations { + pathOrder += other.Method + } + } + + if pathOrder != "/tests/aGETDELETE/tests/bPUTPOST/tests/cPOSTPUT/tests/dDELETEGET" { + t.Errorf("got %v want %v", pathOrder, "see test source") + } +} + +func dummy(i *restful.Request, o *restful.Response) {} + +// go test -v -test.run TestIssue78 ...swagger +type Response struct { + Code int + Users *[]User + Items *[]TestItem +} +type User struct { + Id, Name string +} +type TestItem struct { + Id, Name string +} + +// clear && go test -v -test.run 
TestComposeResponseMessages ...swagger +func TestComposeResponseMessages(t *testing.T) { + responseErrors := map[int]restful.ResponseError{} + responseErrors[400] = restful.ResponseError{Code: 400, Message: "Bad Request", Model: TestItem{}} + route := restful.Route{ResponseErrors: responseErrors} + decl := new(ApiDeclaration) + decl.Models = ModelList{} + msgs := composeResponseMessages(route, decl, &Config{}) + if msgs[0].ResponseModel != "swagger.TestItem" { + t.Errorf("got %s want swagger.TestItem", msgs[0].ResponseModel) + } +} + +func TestIssue78(t *testing.T) { + sws := newSwaggerService(Config{}) + models := new(ModelList) + sws.addModelFromSampleTo(&Operation{}, true, Response{Items: &[]TestItem{}}, models) + model, ok := models.At("swagger.Response") + if !ok { + t.Fatal("missing response model") + } + if "swagger.Response" != model.Id { + t.Fatal("wrong model id:" + model.Id) + } + code, ok := model.Properties.At("Code") + if !ok { + t.Fatal("missing code") + } + if "integer" != *code.Type { + t.Fatal("wrong code type:" + *code.Type) + } + items, ok := model.Properties.At("Items") + if !ok { + t.Fatal("missing items") + } + if "array" != *items.Type { + t.Fatal("wrong items type:" + *items.Type) + } + items_items := items.Items + if items_items == nil { + t.Fatal("missing items->items") + } + ref := items_items.Ref + if ref == nil { + t.Fatal("missing $ref") + } + if *ref != "swagger.TestItem" { + t.Fatal("wrong $ref:" + *ref) + } +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/swagger_webservice.go b/vendor/github.com/emicklei/go-restful-swagger12/swagger_webservice.go new file mode 100644 index 000000000..d90623120 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/swagger_webservice.go @@ -0,0 +1,443 @@ +package swagger + +import ( + "fmt" + + "github.com/emicklei/go-restful" + // "github.com/emicklei/hopwatch" + "net/http" + "reflect" + "sort" + "strings" + + "github.com/emicklei/go-restful/log" +) + +type SwaggerService struct { + config Config + apiDeclarationMap *ApiDeclarationList +} + +func newSwaggerService(config Config) *SwaggerService { + sws := &SwaggerService{ + config: config, + apiDeclarationMap: new(ApiDeclarationList)} + + // Build all ApiDeclarations + for _, each := range config.WebServices { + rootPath := each.RootPath() + // skip the api service itself + if rootPath != config.ApiPath { + if rootPath == "" || rootPath == "/" { + // use routes + for _, route := range each.Routes() { + entry := staticPathFromRoute(route) + _, exists := sws.apiDeclarationMap.At(entry) + if !exists { + sws.apiDeclarationMap.Put(entry, sws.composeDeclaration(each, entry)) + } + } + } else { // use root path + sws.apiDeclarationMap.Put(each.RootPath(), sws.composeDeclaration(each, each.RootPath())) + } + } + } + + // if specified then call the PostBuilderHandler + if config.PostBuildHandler != nil { + config.PostBuildHandler(sws.apiDeclarationMap) + } + return sws +} + +// LogInfo is the function that is called when this package needs to log. It defaults to log.Printf +var LogInfo = func(format string, v ...interface{}) { + // use the restful package-wide logger + log.Printf(format, v...) +} + +// InstallSwaggerService add the WebService that provides the API documentation of all services +// conform the Swagger documentation specifcation. (https://github.com/wordnik/swagger-core/wiki). 
+func InstallSwaggerService(aSwaggerConfig Config) { + RegisterSwaggerService(aSwaggerConfig, restful.DefaultContainer) +} + +// RegisterSwaggerService add the WebService that provides the API documentation of all services +// conform the Swagger documentation specifcation. (https://github.com/wordnik/swagger-core/wiki). +func RegisterSwaggerService(config Config, wsContainer *restful.Container) { + sws := newSwaggerService(config) + ws := new(restful.WebService) + ws.Path(config.ApiPath) + ws.Produces(restful.MIME_JSON) + if config.DisableCORS { + ws.Filter(enableCORS) + } + ws.Route(ws.GET("/").To(sws.getListing)) + ws.Route(ws.GET("/{a}").To(sws.getDeclarations)) + ws.Route(ws.GET("/{a}/{b}").To(sws.getDeclarations)) + ws.Route(ws.GET("/{a}/{b}/{c}").To(sws.getDeclarations)) + ws.Route(ws.GET("/{a}/{b}/{c}/{d}").To(sws.getDeclarations)) + ws.Route(ws.GET("/{a}/{b}/{c}/{d}/{e}").To(sws.getDeclarations)) + ws.Route(ws.GET("/{a}/{b}/{c}/{d}/{e}/{f}").To(sws.getDeclarations)) + ws.Route(ws.GET("/{a}/{b}/{c}/{d}/{e}/{f}/{g}").To(sws.getDeclarations)) + LogInfo("[restful/swagger] listing is available at %v%v", config.WebServicesUrl, config.ApiPath) + wsContainer.Add(ws) + + // Check paths for UI serving + if config.StaticHandler == nil && config.SwaggerFilePath != "" && config.SwaggerPath != "" { + swaggerPathSlash := config.SwaggerPath + // path must end with slash / + if "/" != config.SwaggerPath[len(config.SwaggerPath)-1:] { + LogInfo("[restful/swagger] use corrected SwaggerPath ; must end with slash (/)") + swaggerPathSlash += "/" + } + + LogInfo("[restful/swagger] %v%v is mapped to folder %v", config.WebServicesUrl, swaggerPathSlash, config.SwaggerFilePath) + wsContainer.Handle(swaggerPathSlash, http.StripPrefix(swaggerPathSlash, http.FileServer(http.Dir(config.SwaggerFilePath)))) + + //if we define a custom static handler use it + } else if config.StaticHandler != nil && config.SwaggerPath != "" { + swaggerPathSlash := config.SwaggerPath + // path must end with slash / + if "/" != config.SwaggerPath[len(config.SwaggerPath)-1:] { + LogInfo("[restful/swagger] use corrected SwaggerFilePath ; must end with slash (/)") + swaggerPathSlash += "/" + + } + LogInfo("[restful/swagger] %v%v is mapped to custom Handler %T", config.WebServicesUrl, swaggerPathSlash, config.StaticHandler) + wsContainer.Handle(swaggerPathSlash, config.StaticHandler) + + } else { + LogInfo("[restful/swagger] Swagger(File)Path is empty ; no UI is served") + } +} + +func staticPathFromRoute(r restful.Route) string { + static := r.Path + bracket := strings.Index(static, "{") + if bracket <= 1 { // result cannot be empty + return static + } + if bracket != -1 { + static = r.Path[:bracket] + } + if strings.HasSuffix(static, "/") { + return static[:len(static)-1] + } else { + return static + } +} + +func enableCORS(req *restful.Request, resp *restful.Response, chain *restful.FilterChain) { + if origin := req.HeaderParameter(restful.HEADER_Origin); origin != "" { + // prevent duplicate header + if len(resp.Header().Get(restful.HEADER_AccessControlAllowOrigin)) == 0 { + resp.AddHeader(restful.HEADER_AccessControlAllowOrigin, origin) + } + } + chain.ProcessFilter(req, resp) +} + +func (sws SwaggerService) getListing(req *restful.Request, resp *restful.Response) { + listing := sws.produceListing() + resp.WriteAsJson(listing) +} + +func (sws SwaggerService) produceListing() ResourceListing { + listing := ResourceListing{SwaggerVersion: swaggerVersion, ApiVersion: sws.config.ApiVersion, Info: sws.config.Info} + 
sws.apiDeclarationMap.Do(func(k string, v ApiDeclaration) { + ref := Resource{Path: k} + if len(v.Apis) > 0 { // use description of first (could still be empty) + ref.Description = v.Apis[0].Description + } + listing.Apis = append(listing.Apis, ref) + }) + return listing +} + +func (sws SwaggerService) getDeclarations(req *restful.Request, resp *restful.Response) { + decl, ok := sws.produceDeclarations(composeRootPath(req)) + if !ok { + resp.WriteErrorString(http.StatusNotFound, "ApiDeclaration not found") + return + } + // unless WebServicesUrl is given + if len(sws.config.WebServicesUrl) == 0 { + // update base path from the actual request + // TODO how to detect https? assume http for now + var host string + // X-Forwarded-Host or Host or Request.Host + hostvalues, ok := req.Request.Header["X-Forwarded-Host"] // apache specific? + if !ok || len(hostvalues) == 0 { + forwarded, ok := req.Request.Header["Host"] // without reverse-proxy + if !ok || len(forwarded) == 0 { + // fallback to Host field + host = req.Request.Host + } else { + host = forwarded[0] + } + } else { + host = hostvalues[0] + } + // inspect Referer for the scheme (http vs https) + scheme := "http" + if referer := req.Request.Header["Referer"]; len(referer) > 0 { + if strings.HasPrefix(referer[0], "https") { + scheme = "https" + } + } + decl.BasePath = fmt.Sprintf("%s://%s", scheme, host) + } + resp.WriteAsJson(decl) +} + +func (sws SwaggerService) produceAllDeclarations() map[string]ApiDeclaration { + decls := map[string]ApiDeclaration{} + sws.apiDeclarationMap.Do(func(k string, v ApiDeclaration) { + decls[k] = v + }) + return decls +} + +func (sws SwaggerService) produceDeclarations(route string) (*ApiDeclaration, bool) { + decl, ok := sws.apiDeclarationMap.At(route) + if !ok { + return nil, false + } + decl.BasePath = sws.config.WebServicesUrl + return &decl, true +} + +// composeDeclaration uses all routes and parameters to create a ApiDeclaration +func (sws SwaggerService) composeDeclaration(ws *restful.WebService, pathPrefix string) ApiDeclaration { + decl := ApiDeclaration{ + SwaggerVersion: swaggerVersion, + BasePath: sws.config.WebServicesUrl, + ResourcePath: pathPrefix, + Models: ModelList{}, + ApiVersion: ws.Version()} + + // collect any path parameters + rootParams := []Parameter{} + for _, param := range ws.PathParameters() { + rootParams = append(rootParams, asSwaggerParameter(param.Data())) + } + // aggregate by path + pathToRoutes := newOrderedRouteMap() + for _, other := range ws.Routes() { + if strings.HasPrefix(other.Path, pathPrefix) { + if len(pathPrefix) > 1 && len(other.Path) > len(pathPrefix) && other.Path[len(pathPrefix)] != '/' { + continue + } + pathToRoutes.Add(other.Path, other) + } + } + pathToRoutes.Do(func(path string, routes []restful.Route) { + api := Api{Path: strings.TrimSuffix(withoutWildcard(path), "/"), Description: ws.Documentation()} + voidString := "void" + for _, route := range routes { + operation := Operation{ + Method: route.Method, + Summary: route.Doc, + Notes: route.Notes, + // Type gets overwritten if there is a write sample + DataTypeFields: DataTypeFields{Type: &voidString}, + Parameters: []Parameter{}, + Nickname: route.Operation, + ResponseMessages: composeResponseMessages(route, &decl, &sws.config)} + + operation.Consumes = route.Consumes + operation.Produces = route.Produces + + // share root params if any + for _, swparam := range rootParams { + operation.Parameters = append(operation.Parameters, swparam) + } + // route specific params + for _, param := range 
route.ParameterDocs { + operation.Parameters = append(operation.Parameters, asSwaggerParameter(param.Data())) + } + + sws.addModelsFromRouteTo(&operation, route, &decl) + api.Operations = append(api.Operations, operation) + } + decl.Apis = append(decl.Apis, api) + }) + return decl +} + +func withoutWildcard(path string) string { + if strings.HasSuffix(path, ":*}") { + return path[0:len(path)-3] + "}" + } + return path +} + +// composeResponseMessages takes the ResponseErrors (if any) and creates ResponseMessages from them. +func composeResponseMessages(route restful.Route, decl *ApiDeclaration, config *Config) (messages []ResponseMessage) { + if route.ResponseErrors == nil { + return messages + } + // sort by code + codes := sort.IntSlice{} + for code := range route.ResponseErrors { + codes = append(codes, code) + } + codes.Sort() + for _, code := range codes { + each := route.ResponseErrors[code] + message := ResponseMessage{ + Code: code, + Message: each.Message, + } + if each.Model != nil { + st := reflect.TypeOf(each.Model) + isCollection, st := detectCollectionType(st) + // collection cannot be in responsemodel + if !isCollection { + modelName := modelBuilder{}.keyFrom(st) + modelBuilder{Models: &decl.Models, Config: config}.addModel(st, "") + message.ResponseModel = modelName + } + } + messages = append(messages, message) + } + return +} + +// addModelsFromRoute takes any read or write sample from the Route and creates a Swagger model from it. +func (sws SwaggerService) addModelsFromRouteTo(operation *Operation, route restful.Route, decl *ApiDeclaration) { + if route.ReadSample != nil { + sws.addModelFromSampleTo(operation, false, route.ReadSample, &decl.Models) + } + if route.WriteSample != nil { + sws.addModelFromSampleTo(operation, true, route.WriteSample, &decl.Models) + } +} + +func detectCollectionType(st reflect.Type) (bool, reflect.Type) { + isCollection := false + if st.Kind() == reflect.Slice || st.Kind() == reflect.Array { + st = st.Elem() + isCollection = true + } else { + if st.Kind() == reflect.Ptr { + if st.Elem().Kind() == reflect.Slice || st.Elem().Kind() == reflect.Array { + st = st.Elem().Elem() + isCollection = true + } + } + } + return isCollection, st +} + +// addModelFromSample creates and adds (or overwrites) a Model from a sample resource +func (sws SwaggerService) addModelFromSampleTo(operation *Operation, isResponse bool, sample interface{}, models *ModelList) { + mb := modelBuilder{Models: models, Config: &sws.config} + if isResponse { + sampleType, items := asDataType(sample, &sws.config) + operation.Type = sampleType + operation.Items = items + } + mb.addModelFrom(sample) +} + +func asSwaggerParameter(param restful.ParameterData) Parameter { + return Parameter{ + DataTypeFields: DataTypeFields{ + Type: ¶m.DataType, + Format: asFormat(param.DataType, param.DataFormat), + DefaultValue: Special(param.DefaultValue), + }, + Name: param.Name, + Description: param.Description, + ParamType: asParamType(param.Kind), + + Required: param.Required} +} + +// Between 1..7 path parameters is supported +func composeRootPath(req *restful.Request) string { + path := "/" + req.PathParameter("a") + b := req.PathParameter("b") + if b == "" { + return path + } + path = path + "/" + b + c := req.PathParameter("c") + if c == "" { + return path + } + path = path + "/" + c + d := req.PathParameter("d") + if d == "" { + return path + } + path = path + "/" + d + e := req.PathParameter("e") + if e == "" { + return path + } + path = path + "/" + e + f := req.PathParameter("f") + if f 
== "" { + return path + } + path = path + "/" + f + g := req.PathParameter("g") + if g == "" { + return path + } + return path + "/" + g +} + +func asFormat(dataType string, dataFormat string) string { + if dataFormat != "" { + return dataFormat + } + return "" // TODO +} + +func asParamType(kind int) string { + switch { + case kind == restful.PathParameterKind: + return "path" + case kind == restful.QueryParameterKind: + return "query" + case kind == restful.BodyParameterKind: + return "body" + case kind == restful.HeaderParameterKind: + return "header" + case kind == restful.FormParameterKind: + return "form" + } + return "" +} + +func asDataType(any interface{}, config *Config) (*string, *Item) { + // If it's not a collection, return the suggested model name + st := reflect.TypeOf(any) + isCollection, st := detectCollectionType(st) + modelName := modelBuilder{}.keyFrom(st) + // if it's not a collection we are done + if !isCollection { + return &modelName, nil + } + + // XXX: This is not very elegant + // We create an Item object referring to the given model + models := ModelList{} + mb := modelBuilder{Models: &models, Config: config} + mb.addModelFrom(any) + + elemTypeName := mb.getElementTypeName(modelName, "", st) + item := new(Item) + if mb.isPrimitiveType(elemTypeName) { + mapped := mb.jsonSchemaType(elemTypeName) + item.Type = &mapped + } else { + item.Ref = &elemTypeName + } + tmp := "array" + return &tmp, item +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/test_package/struct.go b/vendor/github.com/emicklei/go-restful-swagger12/test_package/struct.go new file mode 100644 index 000000000..b9a6f9308 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/test_package/struct.go @@ -0,0 +1,5 @@ +package test_package + +type TestStruct struct { + TestField string +} diff --git a/vendor/github.com/emicklei/go-restful-swagger12/utils_test.go b/vendor/github.com/emicklei/go-restful-swagger12/utils_test.go new file mode 100644 index 000000000..220289af3 --- /dev/null +++ b/vendor/github.com/emicklei/go-restful-swagger12/utils_test.go @@ -0,0 +1,86 @@ +package swagger + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "strings" + "testing" +) + +func testJsonFromStructWithConfig(t *testing.T, sample interface{}, expectedJson string, config *Config) bool { + m := modelsFromStructWithConfig(sample, config) + data, _ := json.MarshalIndent(m, " ", " ") + return compareJson(t, string(data), expectedJson) +} + +func modelsFromStructWithConfig(sample interface{}, config *Config) *ModelList { + models := new(ModelList) + builder := modelBuilder{Models: models, Config: config} + builder.addModelFrom(sample) + return models +} + +func testJsonFromStruct(t *testing.T, sample interface{}, expectedJson string) bool { + return testJsonFromStructWithConfig(t, sample, expectedJson, &Config{}) +} + +func modelsFromStruct(sample interface{}) *ModelList { + return modelsFromStructWithConfig(sample, &Config{}) +} + +func compareJson(t *testing.T, actualJsonAsString string, expectedJsonAsString string) bool { + success := false + var actualMap map[string]interface{} + json.Unmarshal([]byte(actualJsonAsString), &actualMap) + var expectedMap map[string]interface{} + err := json.Unmarshal([]byte(expectedJsonAsString), &expectedMap) + if err != nil { + var actualArray []interface{} + json.Unmarshal([]byte(actualJsonAsString), &actualArray) + var expectedArray []interface{} + err := json.Unmarshal([]byte(expectedJsonAsString), &expectedArray) + success = 
reflect.DeepEqual(actualArray, expectedArray) + if err != nil { + t.Fatalf("Unparsable expected JSON: %s, actual: %v, expected: %v", err, actualJsonAsString, expectedJsonAsString) + } + } else { + success = reflect.DeepEqual(actualMap, expectedMap) + } + if !success { + t.Log("---- expected -----") + t.Log(withLineNumbers(expectedJsonAsString)) + t.Log("---- actual -----") + t.Log(withLineNumbers(actualJsonAsString)) + t.Log("---- raw -----") + t.Log(actualJsonAsString) + t.Error("there are differences") + return false + } + return true +} + +func indexOfNonMatchingLine(actual, expected string) int { + a := strings.Split(actual, "\n") + e := strings.Split(expected, "\n") + size := len(a) + if len(e) < len(a) { + size = len(e) + } + for i := 0; i < size; i++ { + if a[i] != e[i] { + return i + } + } + return -1 +} + +func withLineNumbers(content string) string { + var buffer bytes.Buffer + lines := strings.Split(content, "\n") + for i, each := range lines { + buffer.WriteString(fmt.Sprintf("%d:%s\n", i, each)) + } + return buffer.String() +} diff --git a/vendor/github.com/fsnotify/fsnotify/.editorconfig b/vendor/github.com/fsnotify/fsnotify/.editorconfig new file mode 100644 index 000000000..ba49e3c23 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.editorconfig @@ -0,0 +1,5 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 diff --git a/vendor/github.com/fsnotify/fsnotify/.github/ISSUE_TEMPLATE.md b/vendor/github.com/fsnotify/fsnotify/.github/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..4ad1aed8f --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,11 @@ +Before reporting an issue, please ensure you are using the latest release of fsnotify. + +### Which operating system (GOOS) and version are you using? + +Linux: lsb_release -a +macOS: sw_vers +Windows: systeminfo | findstr /B /C:OS + +### Please describe the issue that occurred. + +### Are you able to reproduce the issue? Please provide steps to reproduce and a code sample if possible. diff --git a/vendor/github.com/fsnotify/fsnotify/.github/PULL_REQUEST_TEMPLATE.md b/vendor/github.com/fsnotify/fsnotify/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..64ddf7cef --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,8 @@ +#### What does this pull request do? + + +#### Where should the reviewer start? + + +#### How should this be manually tested? + diff --git a/vendor/github.com/fsnotify/fsnotify/.gitignore b/vendor/github.com/fsnotify/fsnotify/.gitignore new file mode 100644 index 000000000..4cd0cbaf4 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.gitignore @@ -0,0 +1,6 @@ +# Setup a Global .gitignore for OS and editor generated files: +# https://help.github.com/articles/ignoring-files +# git config --global core.excludesfile ~/.gitignore_global + +.vagrant +*.sublime-project diff --git a/vendor/github.com/fsnotify/fsnotify/.travis.yml b/vendor/github.com/fsnotify/fsnotify/.travis.yml new file mode 100644 index 000000000..3a5c933bc --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.travis.yml @@ -0,0 +1,28 @@ +sudo: false +language: go + +go: + - 1.6.3 + - tip + +matrix: + allow_failures: + - go: tip + +before_script: + - go get -u github.com/golang/lint/golint + +script: + - go test -v --race ./... + +after_script: + - test -z "$(gofmt -s -l -w . | tee /dev/stderr)" + - test -z "$(golint ./... | tee /dev/stderr)" + - go vet ./... 
+ +os: + - linux + - osx + +notifications: + email: false diff --git a/vendor/github.com/fsnotify/fsnotify/AUTHORS b/vendor/github.com/fsnotify/fsnotify/AUTHORS new file mode 100644 index 000000000..0a5bf8f61 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/AUTHORS @@ -0,0 +1,46 @@ +# Names should be added to this file as +# Name or Organization +# The email address is not required for organizations. + +# You can update this list using the following command: +# +# $ git shortlog -se | awk '{print $2 " " $3 " " $4}' + +# Please keep the list sorted. + +Adrien Bustany +Amit Krishnan +Bjørn Erik Pedersen +Bruno Bigras +Caleb Spare +Case Nelson +Chris Howey +Christoffer Buchholz +Daniel Wagner-Hall +Dave Cheney +Evan Phoenix +Francisco Souza +Hari haran +John C Barstow +Kelvin Fo +Ken-ichirou MATSUZAWA +Matt Layher +Nathan Youngman +Patrick +Paul Hammond +Pawel Knap +Pieter Droogendijk +Pursuit92 +Riku Voipio +Rob Figueiredo +Slawek Ligus +Soge Zhang +Tiffany Jernigan +Tilak Sharma +Travis Cline +Tudor Golubenco +Yukang +bronze1man +debrando +henrikedwards +铁哥 diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md new file mode 100644 index 000000000..40d7660d5 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md @@ -0,0 +1,307 @@ +# Changelog + +## v1.4.2 / 2016-10-10 + +* Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack) + +## v1.4.1 / 2016-10-04 + +* Fix flaky inotify stress test on Linux [#177](https://github.com/fsnotify/fsnotify/pull/177) (thanks @pattyshack) + +## v1.4.0 / 2016-10-01 + +* add a String() method to Event.Op [#165](https://github.com/fsnotify/fsnotify/pull/165) (thanks @oozie) + +## v1.3.1 / 2016-06-28 + +* Windows: fix for double backslash when watching the root of a drive [#151](https://github.com/fsnotify/fsnotify/issues/151) (thanks @brunoqc) + +## v1.3.0 / 2016-04-19 + +* Support linux/arm64 by [patching](https://go-review.googlesource.com/#/c/21971/) x/sys/unix and switching to to it from syscall (thanks @suihkulokki) [#135](https://github.com/fsnotify/fsnotify/pull/135) + +## v1.2.10 / 2016-03-02 + +* Fix golint errors in windows.go [#121](https://github.com/fsnotify/fsnotify/pull/121) (thanks @tiffanyfj) + +## v1.2.9 / 2016-01-13 + +kqueue: Fix logic for CREATE after REMOVE [#111](https://github.com/fsnotify/fsnotify/pull/111) (thanks @bep) + +## v1.2.8 / 2015-12-17 + +* kqueue: fix race condition in Close [#105](https://github.com/fsnotify/fsnotify/pull/105) (thanks @djui for reporting the issue and @ppknap for writing a failing test) +* inotify: fix race in test +* enable race detection for continuous integration (Linux, Mac, Windows) + +## v1.2.5 / 2015-10-17 + +* inotify: use epoll_create1 for arm64 support (requires Linux 2.6.27 or later) [#100](https://github.com/fsnotify/fsnotify/pull/100) (thanks @suihkulokki) +* inotify: fix path leaks [#73](https://github.com/fsnotify/fsnotify/pull/73) (thanks @chamaken) +* kqueue: watch for rename events on subdirectories [#83](https://github.com/fsnotify/fsnotify/pull/83) (thanks @guotie) +* kqueue: avoid infinite loops from symlinks cycles [#101](https://github.com/fsnotify/fsnotify/pull/101) (thanks @illicitonion) + +## v1.2.1 / 2015-10-14 + +* kqueue: don't watch named pipes [#98](https://github.com/fsnotify/fsnotify/pull/98) (thanks @evanphx) + +## v1.2.0 / 2015-02-08 + +* inotify: use epoll to wake 
up readEvents [#66](https://github.com/fsnotify/fsnotify/pull/66) (thanks @PieterD) +* inotify: closing watcher should now always shut down goroutine [#63](https://github.com/fsnotify/fsnotify/pull/63) (thanks @PieterD) +* kqueue: close kqueue after removing watches, fixes [#59](https://github.com/fsnotify/fsnotify/issues/59) + +## v1.1.1 / 2015-02-05 + +* inotify: Retry read on EINTR [#61](https://github.com/fsnotify/fsnotify/issues/61) (thanks @PieterD) + +## v1.1.0 / 2014-12-12 + +* kqueue: rework internals [#43](https://github.com/fsnotify/fsnotify/pull/43) + * add low-level functions + * only need to store flags on directories + * less mutexes [#13](https://github.com/fsnotify/fsnotify/issues/13) + * done can be an unbuffered channel + * remove calls to os.NewSyscallError +* More efficient string concatenation for Event.String() [#52](https://github.com/fsnotify/fsnotify/pull/52) (thanks @mdlayher) +* kqueue: fix regression in rework causing subdirectories to be watched [#48](https://github.com/fsnotify/fsnotify/issues/48) +* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) + +## v1.0.4 / 2014-09-07 + +* kqueue: add dragonfly to the build tags. +* Rename source code files, rearrange code so exported APIs are at the top. +* Add done channel to example code. [#37](https://github.com/fsnotify/fsnotify/pull/37) (thanks @chenyukang) + +## v1.0.3 / 2014-08-19 + +* [Fix] Windows MOVED_TO now translates to Create like on BSD and Linux. [#36](https://github.com/fsnotify/fsnotify/issues/36) + +## v1.0.2 / 2014-08-17 + +* [Fix] Missing create events on OS X. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) +* [Fix] Make ./path and path equivalent. (thanks @zhsso) + +## v1.0.0 / 2014-08-15 + +* [API] Remove AddWatch on Windows, use Add. +* Improve documentation for exported identifiers. [#30](https://github.com/fsnotify/fsnotify/issues/30) +* Minor updates based on feedback from golint. + +## dev / 2014-07-09 + +* Moved to [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify). +* Use os.NewSyscallError instead of returning errno (thanks @hariharan-uno) + +## dev / 2014-07-04 + +* kqueue: fix incorrect mutex used in Close() +* Update example to demonstrate usage of Op. + +## dev / 2014-06-28 + +* [API] Don't set the Write Op for attribute notifications [#4](https://github.com/fsnotify/fsnotify/issues/4) +* Fix for String() method on Event (thanks Alex Brainman) +* Don't build on Plan 9 or Solaris (thanks @4ad) + +## dev / 2014-06-21 + +* Events channel of type Event rather than *Event. +* [internal] use syscall constants directly for inotify and kqueue. +* [internal] kqueue: rename events to kevents and fileEvent to event. + +## dev / 2014-06-19 + +* Go 1.3+ required on Windows (uses syscall.ERROR_MORE_DATA internally). +* [internal] remove cookie from Event struct (unused). +* [internal] Event struct has the same definition across every OS. +* [internal] remove internal watch and removeWatch methods. + +## dev / 2014-06-12 + +* [API] Renamed Watch() to Add() and RemoveWatch() to Remove(). +* [API] Pluralized channel names: Events and Errors. +* [API] Renamed FileEvent struct to Event. +* [API] Op constants replace methods like IsCreate(). + +## dev / 2014-06-12 + +* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) + +## dev / 2014-05-23 + +* [API] Remove current implementation of WatchFlags. 
+ * current implementation doesn't take advantage of OS for efficiency + * provides little benefit over filtering events as they are received, but has extra bookkeeping and mutexes + * no tests for the current implementation + * not fully implemented on Windows [#93](https://github.com/howeyc/fsnotify/issues/93#issuecomment-39285195) + +## v0.9.3 / 2014-12-31 + +* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) + +## v0.9.2 / 2014-08-17 + +* [Backport] Fix missing create events on OS X. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) + +## v0.9.1 / 2014-06-12 + +* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) + +## v0.9.0 / 2014-01-17 + +* IsAttrib() for events that only concern a file's metadata [#79][] (thanks @abustany) +* [Fix] kqueue: fix deadlock [#77][] (thanks @cespare) +* [NOTICE] Development has moved to `code.google.com/p/go.exp/fsnotify` in preparation for inclusion in the Go standard library. + +## v0.8.12 / 2013-11-13 + +* [API] Remove FD_SET and friends from Linux adapter + +## v0.8.11 / 2013-11-02 + +* [Doc] Add Changelog [#72][] (thanks @nathany) +* [Doc] Spotlight and double modify events on OS X [#62][] (reported by @paulhammond) + +## v0.8.10 / 2013-10-19 + +* [Fix] kqueue: remove file watches when parent directory is removed [#71][] (reported by @mdwhatcott) +* [Fix] kqueue: race between Close and readEvents [#70][] (reported by @bernerdschaefer) +* [Doc] specify OS-specific limits in README (thanks @debrando) + +## v0.8.9 / 2013-09-08 + +* [Doc] Contributing (thanks @nathany) +* [Doc] update package path in example code [#63][] (thanks @paulhammond) +* [Doc] GoCI badge in README (Linux only) [#60][] +* [Doc] Cross-platform testing with Vagrant [#59][] (thanks @nathany) + +## v0.8.8 / 2013-06-17 + +* [Fix] Windows: handle `ERROR_MORE_DATA` on Windows [#49][] (thanks @jbowtie) + +## v0.8.7 / 2013-06-03 + +* [API] Make syscall flags internal +* [Fix] inotify: ignore event changes +* [Fix] race in symlink test [#45][] (reported by @srid) +* [Fix] tests on Windows +* lower case error messages + +## v0.8.6 / 2013-05-23 + +* kqueue: Use EVT_ONLY flag on Darwin +* [Doc] Update README with full example + +## v0.8.5 / 2013-05-09 + +* [Fix] inotify: allow monitoring of "broken" symlinks (thanks @tsg) + +## v0.8.4 / 2013-04-07 + +* [Fix] kqueue: watch all file events [#40][] (thanks @ChrisBuchholz) + +## v0.8.3 / 2013-03-13 + +* [Fix] inoitfy/kqueue memory leak [#36][] (reported by @nbkolchin) +* [Fix] kqueue: use fsnFlags for watching a directory [#33][] (reported by @nbkolchin) + +## v0.8.2 / 2013-02-07 + +* [Doc] add Authors +* [Fix] fix data races for map access [#29][] (thanks @fsouza) + +## v0.8.1 / 2013-01-09 + +* [Fix] Windows path separators +* [Doc] BSD License + +## v0.8.0 / 2012-11-09 + +* kqueue: directory watching improvements (thanks @vmirage) +* inotify: add `IN_MOVED_TO` [#25][] (requested by @cpisto) +* [Fix] kqueue: deleting watched directory [#24][] (reported by @jakerr) + +## v0.7.4 / 2012-10-09 + +* [Fix] inotify: fixes from https://codereview.appspot.com/5418045/ (ugorji) +* [Fix] kqueue: preserve watch flags when watching for delete [#21][] (reported by @robfig) +* [Fix] kqueue: watch the directory even if it isn't a new watch (thanks @robfig) +* [Fix] kqueue: modify after recreation of file + +## v0.7.3 / 2012-09-27 + +* [Fix] kqueue: watch with an existing folder inside the watched folder (thanks @vmirage) +* [Fix] 
kqueue: no longer get duplicate CREATE events + +## v0.7.2 / 2012-09-01 + +* kqueue: events for created directories + +## v0.7.1 / 2012-07-14 + +* [Fix] for renaming files + +## v0.7.0 / 2012-07-02 + +* [Feature] FSNotify flags +* [Fix] inotify: Added file name back to event path + +## v0.6.0 / 2012-06-06 + +* kqueue: watch files after directory created (thanks @tmc) + +## v0.5.1 / 2012-05-22 + +* [Fix] inotify: remove all watches before Close() + +## v0.5.0 / 2012-05-03 + +* [API] kqueue: return errors during watch instead of sending over channel +* kqueue: match symlink behavior on Linux +* inotify: add `DELETE_SELF` (requested by @taralx) +* [Fix] kqueue: handle EINTR (reported by @robfig) +* [Doc] Godoc example [#1][] (thanks @davecheney) + +## v0.4.0 / 2012-03-30 + +* Go 1 released: build with go tool +* [Feature] Windows support using winfsnotify +* Windows does not have attribute change notifications +* Roll attribute notifications into IsModify + +## v0.3.0 / 2012-02-19 + +* kqueue: add files when watch directory + +## v0.2.0 / 2011-12-30 + +* update to latest Go weekly code + +## v0.1.0 / 2011-10-19 + +* kqueue: add watch on file creation to match inotify +* kqueue: create file event +* inotify: ignore `IN_IGNORED` events +* event String() +* linux: common FileEvent functions +* initial commit + +[#79]: https://github.com/howeyc/fsnotify/pull/79 +[#77]: https://github.com/howeyc/fsnotify/pull/77 +[#72]: https://github.com/howeyc/fsnotify/issues/72 +[#71]: https://github.com/howeyc/fsnotify/issues/71 +[#70]: https://github.com/howeyc/fsnotify/issues/70 +[#63]: https://github.com/howeyc/fsnotify/issues/63 +[#62]: https://github.com/howeyc/fsnotify/issues/62 +[#60]: https://github.com/howeyc/fsnotify/issues/60 +[#59]: https://github.com/howeyc/fsnotify/issues/59 +[#49]: https://github.com/howeyc/fsnotify/issues/49 +[#45]: https://github.com/howeyc/fsnotify/issues/45 +[#40]: https://github.com/howeyc/fsnotify/issues/40 +[#36]: https://github.com/howeyc/fsnotify/issues/36 +[#33]: https://github.com/howeyc/fsnotify/issues/33 +[#29]: https://github.com/howeyc/fsnotify/issues/29 +[#25]: https://github.com/howeyc/fsnotify/issues/25 +[#24]: https://github.com/howeyc/fsnotify/issues/24 +[#21]: https://github.com/howeyc/fsnotify/issues/21 diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md new file mode 100644 index 000000000..6a81ba489 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md @@ -0,0 +1,77 @@ +# Contributing + +## Issues + +* Request features and report bugs using the [GitHub Issue Tracker](https://github.com/fsnotify/fsnotify/issues). +* Please indicate the platform you are using fsnotify on. +* A code example to reproduce the problem is appreciated. + +## Pull Requests + +### Contributor License Agreement + +fsnotify is derived from code in the [golang.org/x/exp](https://godoc.org/golang.org/x/exp) package and it may be included [in the standard library](https://github.com/fsnotify/fsnotify/issues/1) in the future. Therefore fsnotify carries the same [LICENSE](https://github.com/fsnotify/fsnotify/blob/master/LICENSE) as Go. Contributors retain their copyright, so you need to fill out a short form before we can accept your contribution: [Google Individual Contributor License Agreement](https://developers.google.com/open-source/cla/individual). + +Please indicate that you have signed the CLA in your pull request. + +### How fsnotify is Developed + +* Development is done on feature branches. 
+* Tests are run on BSD, Linux, OS X and Windows. +* Pull requests are reviewed and [applied to master][am] using [hub][]. + * Maintainers may modify or squash commits rather than asking contributors to. +* To issue a new release, the maintainers will: + * Update the CHANGELOG + * Tag a version, which will become available through gopkg.in. + +### How to Fork + +For smooth sailing, always use the original import path. Installing with `go get` makes this easy. + +1. Install from GitHub (`go get -u github.com/fsnotify/fsnotify`) +2. Create your feature branch (`git checkout -b my-new-feature`) +3. Ensure everything works and the tests pass (see below) +4. Commit your changes (`git commit -am 'Add some feature'`) + +Contribute upstream: + +1. Fork fsnotify on GitHub +2. Add your remote (`git remote add fork git@github.com:mycompany/repo.git`) +3. Push to the branch (`git push fork my-new-feature`) +4. Create a new Pull Request on GitHub + +This workflow is [thoroughly explained by Katrina Owen](https://splice.com/blog/contributing-open-source-git-repositories-go/). + +### Testing + +fsnotify uses build tags to compile different code on Linux, BSD, OS X, and Windows. + +Before doing a pull request, please do your best to test your changes on multiple platforms, and list which platforms you were able/unable to test on. + +To aid in cross-platform testing there is a Vagrantfile for Linux and BSD. + +* Install [Vagrant](http://www.vagrantup.com/) and [VirtualBox](https://www.virtualbox.org/) +* Setup [Vagrant Gopher](https://github.com/nathany/vagrant-gopher) in your `src` folder. +* Run `vagrant up` from the project folder. You can also setup just one box with `vagrant up linux` or `vagrant up bsd` (note: the BSD box doesn't support Windows hosts at this time, and NFS may prompt for your host OS password) +* Once setup, you can run the test suite on a given OS with a single command `vagrant ssh linux -c 'cd fsnotify/fsnotify; go test'`. +* When you're done, you will want to halt or destroy the Vagrant boxes. + +Notice: fsnotify file system events won't trigger in shared folders. The tests get around this limitation by using the /tmp directory. + +Right now there is no equivalent solution for Windows and OS X, but there are Windows VMs [freely available from Microsoft](http://www.modern.ie/en-us/virtualization-tools#downloads). + +### Maintainers + +Help maintaining fsnotify is welcome. To be a maintainer: + +* Submit a pull request and sign the CLA as above. +* You must be able to run the test suite on Mac, Windows, Linux and BSD. + +To keep master clean, the fsnotify project uses the "apply mail" workflow outlined in Nathaniel Talbott's post ["Merge pull request" Considered Harmful][am]. This requires installing [hub][]. + +All code changes should be internal pull requests. + +Releases are tagged using [Semantic Versioning](http://semver.org/). + +[hub]: https://github.com/github/hub +[am]: http://blog.spreedly.com/2014/06/24/merge-pull-request-considered-harmful/#.VGa5yZPF_Zs diff --git a/vendor/github.com/fsnotify/fsnotify/LICENSE b/vendor/github.com/fsnotify/fsnotify/LICENSE new file mode 100644 index 000000000..f21e54080 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. +Copyright (c) 2012 fsnotify Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md new file mode 100644 index 000000000..3c891e349 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/README.md @@ -0,0 +1,50 @@ +# File system notifications for Go + +[![GoDoc](https://godoc.org/github.com/fsnotify/fsnotify?status.svg)](https://godoc.org/github.com/fsnotify/fsnotify) [![Go Report Card](https://goreportcard.com/badge/github.com/fsnotify/fsnotify)](https://goreportcard.com/report/github.com/fsnotify/fsnotify) + +fsnotify utilizes [golang.org/x/sys](https://godoc.org/golang.org/x/sys) rather than `syscall` from the standard library. Ensure you have the latest version installed by running: + +```console +go get -u golang.org/x/sys/... +``` + +Cross platform: Windows, Linux, BSD and OS X. + +|Adapter |OS |Status | +|----------|----------|----------| +|inotify |Linux 2.6.27 or later, Android\*|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| +|kqueue |BSD, OS X, iOS\*|Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify)| +|ReadDirectoryChangesW|Windows|Supported [![Build status](https://ci.appveyor.com/api/projects/status/ivwjubaih4r0udeh/branch/master?svg=true)](https://ci.appveyor.com/project/NathanYoungman/fsnotify/branch/master)| +|FSEvents |OS X |[Planned](https://github.com/fsnotify/fsnotify/issues/11)| +|FEN |Solaris 11 |[In Progress](https://github.com/fsnotify/fsnotify/issues/12)| +|fanotify |Linux 2.6.37+ | | +|USN Journals |Windows |[Maybe](https://github.com/fsnotify/fsnotify/issues/53)| +|Polling |*All* |[Maybe](https://github.com/fsnotify/fsnotify/issues/9)| + +\* Android and iOS are untested. + +Please see [the documentation](https://godoc.org/github.com/fsnotify/fsnotify) for usage. Consult the [Wiki](https://github.com/fsnotify/fsnotify/wiki) for the FAQ and further information. 
+ +## API stability + +fsnotify is a fork of [howeyc/fsnotify](https://godoc.org/github.com/howeyc/fsnotify) with a new API as of v1.0. The API is based on [this design document](http://goo.gl/MrYxyA). + +All [releases](https://github.com/fsnotify/fsnotify/releases) are tagged based on [Semantic Versioning](http://semver.org/). Further API changes are [planned](https://github.com/fsnotify/fsnotify/milestones), and will be tagged with a new major revision number. + +Go 1.6 supports dependencies located in the `vendor/` folder. Unless you are creating a library, it is recommended that you copy fsnotify into `vendor/github.com/fsnotify/fsnotify` within your project, and likewise for `golang.org/x/sys`. + +## Contributing + +Please refer to [CONTRIBUTING][] before opening an issue or pull request. + +## Example + +See [example_test.go](https://github.com/fsnotify/fsnotify/blob/master/example_test.go). + +[contributing]: https://github.com/fsnotify/fsnotify/blob/master/CONTRIBUTING.md + +## Related Projects + +* [notify](https://github.com/rjeczalik/notify) +* [fsevents](https://github.com/fsnotify/fsevents) + diff --git a/vendor/github.com/fsnotify/fsnotify/example_test.go b/vendor/github.com/fsnotify/fsnotify/example_test.go new file mode 100644 index 000000000..700502cb3 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/example_test.go @@ -0,0 +1,42 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !plan9 + +package fsnotify_test + +import ( + "log" + + "github.com/fsnotify/fsnotify" +) + +func ExampleNewWatcher() { + watcher, err := fsnotify.NewWatcher() + if err != nil { + log.Fatal(err) + } + defer watcher.Close() + + done := make(chan bool) + go func() { + for { + select { + case event := <-watcher.Events: + log.Println("event:", event) + if event.Op&fsnotify.Write == fsnotify.Write { + log.Println("modified file:", event.Name) + } + case err := <-watcher.Errors: + log.Println("error:", err) + } + } + }() + + err = watcher.Add("/tmp/foo") + if err != nil { + log.Fatal(err) + } + <-done +} diff --git a/vendor/github.com/fsnotify/fsnotify/fen.go b/vendor/github.com/fsnotify/fsnotify/fen.go new file mode 100644 index 000000000..ced39cb88 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/fen.go @@ -0,0 +1,37 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build solaris + +package fsnotify + +import ( + "errors" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + return nil, errors.New("FEN based watcher not yet supported for fsnotify\n") +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + return nil +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + return nil +} + +// Remove stops watching the the named file or directory (non-recursively). 
+func (w *Watcher) Remove(name string) error { + return nil +} diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go new file mode 100644 index 000000000..e7f55fee7 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/fsnotify.go @@ -0,0 +1,62 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !plan9 + +// Package fsnotify provides a platform-independent interface for file system notifications. +package fsnotify + +import ( + "bytes" + "fmt" +) + +// Event represents a single file system notification. +type Event struct { + Name string // Relative path to the file or directory. + Op Op // File operation that triggered the event. +} + +// Op describes a set of file operations. +type Op uint32 + +// These are the generalized file operations that can trigger a notification. +const ( + Create Op = 1 << iota + Write + Remove + Rename + Chmod +) + +func (op Op) String() string { + // Use a buffer for efficient string concatenation + var buffer bytes.Buffer + + if op&Create == Create { + buffer.WriteString("|CREATE") + } + if op&Remove == Remove { + buffer.WriteString("|REMOVE") + } + if op&Write == Write { + buffer.WriteString("|WRITE") + } + if op&Rename == Rename { + buffer.WriteString("|RENAME") + } + if op&Chmod == Chmod { + buffer.WriteString("|CHMOD") + } + if buffer.Len() == 0 { + return "" + } + return buffer.String()[1:] // Strip leading pipe +} + +// String returns a string representation of the event in the form +// "file: REMOVE|WRITE|..." +func (e Event) String() string { + return fmt.Sprintf("%q: %s", e.Name, e.Op.String()) +} diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go b/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go new file mode 100644 index 000000000..9d6d72afc --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/fsnotify_test.go @@ -0,0 +1,40 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !plan9 + +package fsnotify + +import "testing" + +func TestEventStringWithValue(t *testing.T) { + for opMask, expectedString := range map[Op]string{ + Chmod | Create: `"/usr/someFile": CREATE|CHMOD`, + Rename: `"/usr/someFile": RENAME`, + Remove: `"/usr/someFile": REMOVE`, + Write | Chmod: `"/usr/someFile": WRITE|CHMOD`, + } { + event := Event{Name: "/usr/someFile", Op: opMask} + if event.String() != expectedString { + t.Fatalf("Expected %s, got: %v", expectedString, event.String()) + } + + } +} + +func TestEventOpStringWithValue(t *testing.T) { + expectedOpString := "WRITE|CHMOD" + event := Event{Name: "someFile", Op: Write | Chmod} + if event.Op.String() != expectedOpString { + t.Fatalf("Expected %s, got: %v", expectedOpString, event.Op.String()) + } +} + +func TestEventOpStringWithNoValue(t *testing.T) { + expectedOpString := "" + event := Event{Name: "testFile", Op: 0} + if event.Op.String() != expectedOpString { + t.Fatalf("Expected %s, got: %v", expectedOpString, event.Op.String()) + } +} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify.go b/vendor/github.com/fsnotify/fsnotify/inotify.go new file mode 100644 index 000000000..f3b74c51f --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/inotify.go @@ -0,0 +1,325 @@ +// Copyright 2010 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux + +package fsnotify + +import ( + "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "sync" + "unsafe" + + "golang.org/x/sys/unix" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error + mu sync.Mutex // Map access + cv *sync.Cond // sync removing on rm_watch with IN_IGNORE + fd int + poller *fdPoller + watches map[string]*watch // Map of inotify watches (key: path) + paths map[int]string // Map of watched paths (key: watch descriptor) + done chan struct{} // Channel for sending a "quit message" to the reader goroutine + doneResp chan struct{} // Channel to respond to Close +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + // Create inotify fd + fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC) + if fd == -1 { + return nil, errno + } + // Create epoll + poller, err := newFdPoller(fd) + if err != nil { + unix.Close(fd) + return nil, err + } + w := &Watcher{ + fd: fd, + poller: poller, + watches: make(map[string]*watch), + paths: make(map[int]string), + Events: make(chan Event), + Errors: make(chan error), + done: make(chan struct{}), + doneResp: make(chan struct{}), + } + w.cv = sync.NewCond(&w.mu) + + go w.readEvents() + return w, nil +} + +func (w *Watcher) isClosed() bool { + select { + case <-w.done: + return true + default: + return false + } +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + if w.isClosed() { + return nil + } + + // Send 'close' signal to goroutine, and set the Watcher to closed. + close(w.done) + + // Wake up goroutine + w.poller.wake() + + // Wait for goroutine to close + <-w.doneResp + + return nil +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + name = filepath.Clean(name) + if w.isClosed() { + return errors.New("inotify instance already closed") + } + + const agnosticEvents = unix.IN_MOVED_TO | unix.IN_MOVED_FROM | + unix.IN_CREATE | unix.IN_ATTRIB | unix.IN_MODIFY | + unix.IN_MOVE_SELF | unix.IN_DELETE | unix.IN_DELETE_SELF + + var flags uint32 = agnosticEvents + + w.mu.Lock() + watchEntry, found := w.watches[name] + w.mu.Unlock() + if found { + watchEntry.flags |= flags + flags |= unix.IN_MASK_ADD + } + wd, errno := unix.InotifyAddWatch(w.fd, name, flags) + if wd == -1 { + return errno + } + + w.mu.Lock() + w.watches[name] = &watch{wd: uint32(wd), flags: flags} + w.paths[wd] = name + w.mu.Unlock() + + return nil +} + +// Remove stops watching the named file or directory (non-recursively). +func (w *Watcher) Remove(name string) error { + name = filepath.Clean(name) + + // Fetch the watch. + w.mu.Lock() + defer w.mu.Unlock() + watch, ok := w.watches[name] + + // Remove it from inotify. + if !ok { + return fmt.Errorf("can't remove non-existent inotify watch for: %s", name) + } + // inotify_rm_watch will return EINVAL if the file has been deleted; + // the inotify will already have been removed. + // watches and pathes are deleted in ignoreLinux() implicitly and asynchronously + // by calling inotify_rm_watch() below. e.g. readEvents() goroutine receives IN_IGNORE + // so that EINVAL means that the wd is being rm_watch()ed or its file removed + // by another thread and we have not received IN_IGNORE event. 
+ success, errno := unix.InotifyRmWatch(w.fd, watch.wd) + if success == -1 { + // TODO: Perhaps it's not helpful to return an error here in every case. + // the only two possible errors are: + // EBADF, which happens when w.fd is not a valid file descriptor of any kind. + // EINVAL, which is when fd is not an inotify descriptor or wd is not a valid watch descriptor. + // Watch descriptors are invalidated when they are removed explicitly or implicitly; + // explicitly by inotify_rm_watch, implicitly when the file they are watching is deleted. + return errno + } + + // wait until ignoreLinux() deleting maps + exists := true + for exists { + w.cv.Wait() + _, exists = w.watches[name] + } + + return nil +} + +type watch struct { + wd uint32 // Watch descriptor (as returned by the inotify_add_watch() syscall) + flags uint32 // inotify flags of this watch (see inotify(7) for the list of valid flags) +} + +// readEvents reads from the inotify file descriptor, converts the +// received events into Event objects and sends them via the Events channel +func (w *Watcher) readEvents() { + var ( + buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events + n int // Number of bytes read with read() + errno error // Syscall errno + ok bool // For poller.wait + ) + + defer close(w.doneResp) + defer close(w.Errors) + defer close(w.Events) + defer unix.Close(w.fd) + defer w.poller.close() + + for { + // See if we have been closed. + if w.isClosed() { + return + } + + ok, errno = w.poller.wait() + if errno != nil { + select { + case w.Errors <- errno: + case <-w.done: + return + } + continue + } + + if !ok { + continue + } + + n, errno = unix.Read(w.fd, buf[:]) + // If a signal interrupted execution, see if we've been asked to close, and try again. + // http://man7.org/linux/man-pages/man7/signal.7.html : + // "Before Linux 3.8, reads from an inotify(7) file descriptor were not restartable" + if errno == unix.EINTR { + continue + } + + // unix.Read might have been woken up by Close. If so, we're done. + if w.isClosed() { + return + } + + if n < unix.SizeofInotifyEvent { + var err error + if n == 0 { + // If EOF is received. This should really never happen. + err = io.EOF + } else if n < 0 { + // If an error occurred while reading. + err = errno + } else { + // Read was too short. + err = errors.New("notify: short read in readEvents()") + } + select { + case w.Errors <- err: + case <-w.done: + return + } + continue + } + + var offset uint32 + // We don't know how many events we just read into the buffer + // While the offset points to at least one whole event... + for offset <= uint32(n-unix.SizeofInotifyEvent) { + // Point "raw" to the event in the buffer + raw := (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset])) + + mask := uint32(raw.Mask) + nameLen := uint32(raw.Len) + // If the event happened to the watched directory or the watched file, the kernel + // doesn't append the filename to the event, but we would like to always fill the + // the "Name" field with a valid filename. We retrieve the path of the watch from + // the "paths" map. + w.mu.Lock() + name := w.paths[int(raw.Wd)] + w.mu.Unlock() + if nameLen > 0 { + // Point "bytes" at the first byte of the filename + bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent])) + // The filename is padded with NULL bytes. TrimRight() gets rid of those. 
+ name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000") + } + + event := newEvent(name, mask) + + // Send the events that are not ignored on the events channel + if !event.ignoreLinux(w, raw.Wd, mask) { + select { + case w.Events <- event: + case <-w.done: + return + } + } + + // Move to the next event in the buffer + offset += unix.SizeofInotifyEvent + nameLen + } + } +} + +// Certain types of events can be "ignored" and not sent over the Events +// channel. Such as events marked ignore by the kernel, or MODIFY events +// against files that do not exist. +func (e *Event) ignoreLinux(w *Watcher, wd int32, mask uint32) bool { + // Ignore anything the inotify API says to ignore + if mask&unix.IN_IGNORED == unix.IN_IGNORED { + w.mu.Lock() + defer w.mu.Unlock() + name := w.paths[int(wd)] + delete(w.paths, int(wd)) + delete(w.watches, name) + w.cv.Broadcast() + return true + } + + // If the event is not a DELETE or RENAME, the file must exist. + // Otherwise the event is ignored. + // *Note*: this was put in place because it was seen that a MODIFY + // event was sent after the DELETE. This ignores that MODIFY and + // assumes a DELETE will come or has come if the file doesn't exist. + if !(e.Op&Remove == Remove || e.Op&Rename == Rename) { + _, statErr := os.Lstat(e.Name) + return os.IsNotExist(statErr) + } + return false +} + +// newEvent returns an platform-independent Event based on an inotify mask. +func newEvent(name string, mask uint32) Event { + e := Event{Name: name} + if mask&unix.IN_CREATE == unix.IN_CREATE || mask&unix.IN_MOVED_TO == unix.IN_MOVED_TO { + e.Op |= Create + } + if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF || mask&unix.IN_DELETE == unix.IN_DELETE { + e.Op |= Remove + } + if mask&unix.IN_MODIFY == unix.IN_MODIFY { + e.Op |= Write + } + if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF || mask&unix.IN_MOVED_FROM == unix.IN_MOVED_FROM { + e.Op |= Rename + } + if mask&unix.IN_ATTRIB == unix.IN_ATTRIB { + e.Op |= Chmod + } + return e +} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go new file mode 100644 index 000000000..cc7db4b22 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go @@ -0,0 +1,187 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux + +package fsnotify + +import ( + "errors" + + "golang.org/x/sys/unix" +) + +type fdPoller struct { + fd int // File descriptor (as returned by the inotify_init() syscall) + epfd int // Epoll file descriptor + pipe [2]int // Pipe for waking up +} + +func emptyPoller(fd int) *fdPoller { + poller := new(fdPoller) + poller.fd = fd + poller.epfd = -1 + poller.pipe[0] = -1 + poller.pipe[1] = -1 + return poller +} + +// Create a new inotify poller. +// This creates an inotify handler, and an epoll handler. +func newFdPoller(fd int) (*fdPoller, error) { + var errno error + poller := emptyPoller(fd) + defer func() { + if errno != nil { + poller.close() + } + }() + poller.fd = fd + + // Create epoll fd + poller.epfd, errno = unix.EpollCreate1(0) + if poller.epfd == -1 { + return nil, errno + } + // Create pipe; pipe[0] is the read end, pipe[1] the write end. 
+	errno = unix.Pipe2(poller.pipe[:], unix.O_NONBLOCK)
+	if errno != nil {
+		return nil, errno
+	}
+
+	// Register inotify fd with epoll
+	event := unix.EpollEvent{
+		Fd:     int32(poller.fd),
+		Events: unix.EPOLLIN,
+	}
+	errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.fd, &event)
+	if errno != nil {
+		return nil, errno
+	}
+
+	// Register pipe fd with epoll
+	event = unix.EpollEvent{
+		Fd:     int32(poller.pipe[0]),
+		Events: unix.EPOLLIN,
+	}
+	errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.pipe[0], &event)
+	if errno != nil {
+		return nil, errno
+	}
+
+	return poller, nil
+}
+
+// Wait using epoll.
+// Returns true if something is ready to be read,
+// false if there is not.
+func (poller *fdPoller) wait() (bool, error) {
+	// 3 possible events per fd, and 2 fds, makes a maximum of 6 events.
+	// I don't know whether epoll_wait returns the number of events returned,
+	// or the total number of events ready.
+	// I decided to catch both by making the buffer one larger than the maximum.
+	events := make([]unix.EpollEvent, 7)
+	for {
+		n, errno := unix.EpollWait(poller.epfd, events, -1)
+		if n == -1 {
+			if errno == unix.EINTR {
+				continue
+			}
+			return false, errno
+		}
+		if n == 0 {
+			// If there are no events, try again.
+			continue
+		}
+		if n > 6 {
+			// This should never happen. More events were returned than should be possible.
+			return false, errors.New("epoll_wait returned more events than I know what to do with")
+		}
+		ready := events[:n]
+		epollhup := false
+		epollerr := false
+		epollin := false
+		for _, event := range ready {
+			if event.Fd == int32(poller.fd) {
+				if event.Events&unix.EPOLLHUP != 0 {
+					// This should not happen, but if it does, treat it as a wakeup.
+					epollhup = true
+				}
+				if event.Events&unix.EPOLLERR != 0 {
+					// If an error is waiting on the file descriptor, we should pretend
+					// something is ready to read, and let unix.Read pick up the error.
+					epollerr = true
+				}
+				if event.Events&unix.EPOLLIN != 0 {
+					// There is data to read.
+					epollin = true
+				}
+			}
+			if event.Fd == int32(poller.pipe[0]) {
+				if event.Events&unix.EPOLLHUP != 0 {
+					// Write pipe descriptor was closed, by us. This means we're closing down the
+					// watcher, and we should wake up.
+				}
+				if event.Events&unix.EPOLLERR != 0 {
+					// An error is waiting on the pipe file descriptor. This is an absolute
+					// mystery, and should never ever happen.
+					return false, errors.New("Error on the pipe descriptor.")
+				}
+				if event.Events&unix.EPOLLIN != 0 {
+					// This is a regular wakeup, so we have to clear the buffer.
+					err := poller.clearWake()
+					if err != nil {
+						return false, err
+					}
+				}
+			}
+		}
+
+		if epollhup || epollerr || epollin {
+			return true, nil
+		}
+		return false, nil
+	}
+}
+
+// wake writes a single byte to the write end of the pipe, forcing wait() to return.
+func (poller *fdPoller) wake() error {
+	buf := make([]byte, 1)
+	n, errno := unix.Write(poller.pipe[1], buf)
+	if n == -1 {
+		if errno == unix.EAGAIN {
+			// Buffer is full, poller will wake.
+			return nil
+		}
+		return errno
+	}
+	return nil
+}
+
+func (poller *fdPoller) clearWake() error {
+	// You have to be woken up a LOT in order to get to 100!
+	buf := make([]byte, 100)
+	n, errno := unix.Read(poller.pipe[0], buf)
+	if n == -1 {
+		if errno == unix.EAGAIN {
+			// Buffer is empty, someone else cleared our wake.
+			return nil
+		}
+		return errno
+	}
+	return nil
+}
+
+// Close all poller file descriptors, but not the one passed to it.
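+// The inotify fd itself is closed by the Watcher (see readEvents in inotify.go).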
+func (poller *fdPoller) close() { + if poller.pipe[1] != -1 { + unix.Close(poller.pipe[1]) + } + if poller.pipe[0] != -1 { + unix.Close(poller.pipe[0]) + } + if poller.epfd != -1 { + unix.Close(poller.epfd) + } +} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_poller_test.go b/vendor/github.com/fsnotify/fsnotify/inotify_poller_test.go new file mode 100644 index 000000000..26623efef --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/inotify_poller_test.go @@ -0,0 +1,229 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux + +package fsnotify + +import ( + "testing" + "time" + + "golang.org/x/sys/unix" +) + +type testFd [2]int + +func makeTestFd(t *testing.T) testFd { + var tfd testFd + errno := unix.Pipe(tfd[:]) + if errno != nil { + t.Fatalf("Failed to create pipe: %v", errno) + } + return tfd +} + +func (tfd testFd) fd() int { + return tfd[0] +} + +func (tfd testFd) closeWrite(t *testing.T) { + errno := unix.Close(tfd[1]) + if errno != nil { + t.Fatalf("Failed to close write end of pipe: %v", errno) + } +} + +func (tfd testFd) put(t *testing.T) { + buf := make([]byte, 10) + _, errno := unix.Write(tfd[1], buf) + if errno != nil { + t.Fatalf("Failed to write to pipe: %v", errno) + } +} + +func (tfd testFd) get(t *testing.T) { + buf := make([]byte, 10) + _, errno := unix.Read(tfd[0], buf) + if errno != nil { + t.Fatalf("Failed to read from pipe: %v", errno) + } +} + +func (tfd testFd) close() { + unix.Close(tfd[1]) + unix.Close(tfd[0]) +} + +func makePoller(t *testing.T) (testFd, *fdPoller) { + tfd := makeTestFd(t) + poller, err := newFdPoller(tfd.fd()) + if err != nil { + t.Fatalf("Failed to create poller: %v", err) + } + return tfd, poller +} + +func TestPollerWithBadFd(t *testing.T) { + _, err := newFdPoller(-1) + if err != unix.EBADF { + t.Fatalf("Expected EBADF, got: %v", err) + } +} + +func TestPollerWithData(t *testing.T) { + tfd, poller := makePoller(t) + defer tfd.close() + defer poller.close() + + tfd.put(t) + ok, err := poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + if !ok { + t.Fatalf("expected poller to return true") + } + tfd.get(t) +} + +func TestPollerWithWakeup(t *testing.T) { + tfd, poller := makePoller(t) + defer tfd.close() + defer poller.close() + + err := poller.wake() + if err != nil { + t.Fatalf("wake failed: %v", err) + } + ok, err := poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + if ok { + t.Fatalf("expected poller to return false") + } +} + +func TestPollerWithClose(t *testing.T) { + tfd, poller := makePoller(t) + defer tfd.close() + defer poller.close() + + tfd.closeWrite(t) + ok, err := poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + if !ok { + t.Fatalf("expected poller to return true") + } +} + +func TestPollerWithWakeupAndData(t *testing.T) { + tfd, poller := makePoller(t) + defer tfd.close() + defer poller.close() + + tfd.put(t) + err := poller.wake() + if err != nil { + t.Fatalf("wake failed: %v", err) + } + + // both data and wakeup + ok, err := poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + if !ok { + t.Fatalf("expected poller to return true") + } + + // data is still in the buffer, wakeup is cleared + ok, err = poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + if !ok { + t.Fatalf("expected poller to return true") + } + + tfd.get(t) + // data is gone, only wakeup now + err = 
poller.wake() + if err != nil { + t.Fatalf("wake failed: %v", err) + } + ok, err = poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + if ok { + t.Fatalf("expected poller to return false") + } +} + +func TestPollerConcurrent(t *testing.T) { + tfd, poller := makePoller(t) + defer tfd.close() + defer poller.close() + + oks := make(chan bool) + live := make(chan bool) + defer close(live) + go func() { + defer close(oks) + for { + ok, err := poller.wait() + if err != nil { + t.Fatalf("poller failed: %v", err) + } + oks <- ok + if !<-live { + return + } + } + }() + + // Try a write + select { + case <-time.After(50 * time.Millisecond): + case <-oks: + t.Fatalf("poller did not wait") + } + tfd.put(t) + if !<-oks { + t.Fatalf("expected true") + } + tfd.get(t) + live <- true + + // Try a wakeup + select { + case <-time.After(50 * time.Millisecond): + case <-oks: + t.Fatalf("poller did not wait") + } + err := poller.wake() + if err != nil { + t.Fatalf("wake failed: %v", err) + } + if <-oks { + t.Fatalf("expected false") + } + live <- true + + // Try a close + select { + case <-time.After(50 * time.Millisecond): + case <-oks: + t.Fatalf("poller did not wait") + } + tfd.closeWrite(t) + if !<-oks { + t.Fatalf("expected true") + } + tfd.get(t) +} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_test.go b/vendor/github.com/fsnotify/fsnotify/inotify_test.go new file mode 100644 index 000000000..a4bb202d1 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/inotify_test.go @@ -0,0 +1,360 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux + +package fsnotify + +import ( + "fmt" + "os" + "path/filepath" + "strings" + "testing" + "time" +) + +func TestInotifyCloseRightAway(t *testing.T) { + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher") + } + + // Close immediately; it won't even reach the first unix.Read. + w.Close() + + // Wait for the close to complete. + <-time.After(50 * time.Millisecond) + isWatcherReallyClosed(t, w) +} + +func TestInotifyCloseSlightlyLater(t *testing.T) { + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher") + } + + // Wait until readEvents has reached unix.Read, and Close. + <-time.After(50 * time.Millisecond) + w.Close() + + // Wait for the close to complete. + <-time.After(50 * time.Millisecond) + isWatcherReallyClosed(t, w) +} + +func TestInotifyCloseSlightlyLaterWithWatch(t *testing.T) { + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher") + } + w.Add(testDir) + + // Wait until readEvents has reached unix.Read, and Close. + <-time.After(50 * time.Millisecond) + w.Close() + + // Wait for the close to complete. + <-time.After(50 * time.Millisecond) + isWatcherReallyClosed(t, w) +} + +func TestInotifyCloseAfterRead(t *testing.T) { + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher") + } + + err = w.Add(testDir) + if err != nil { + t.Fatalf("Failed to add .") + } + + // Generate an event. + os.Create(filepath.Join(testDir, "somethingSOMETHINGsomethingSOMETHING")) + + // Wait for readEvents to read the event, then close the watcher. + <-time.After(50 * time.Millisecond) + w.Close() + + // Wait for the close to complete. 
+ <-time.After(50 * time.Millisecond) + isWatcherReallyClosed(t, w) +} + +func isWatcherReallyClosed(t *testing.T, w *Watcher) { + select { + case err, ok := <-w.Errors: + if ok { + t.Fatalf("w.Errors is not closed; readEvents is still alive after closing (error: %v)", err) + } + default: + t.Fatalf("w.Errors would have blocked; readEvents is still alive!") + } + + select { + case _, ok := <-w.Events: + if ok { + t.Fatalf("w.Events is not closed; readEvents is still alive after closing") + } + default: + t.Fatalf("w.Events would have blocked; readEvents is still alive!") + } +} + +func TestInotifyCloseCreate(t *testing.T) { + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher: %v", err) + } + defer w.Close() + + err = w.Add(testDir) + if err != nil { + t.Fatalf("Failed to add testDir: %v", err) + } + h, err := os.Create(filepath.Join(testDir, "testfile")) + if err != nil { + t.Fatalf("Failed to create file in testdir: %v", err) + } + h.Close() + select { + case _ = <-w.Events: + case err := <-w.Errors: + t.Fatalf("Error from watcher: %v", err) + case <-time.After(50 * time.Millisecond): + t.Fatalf("Took too long to wait for event") + } + + // At this point, we've received one event, so the goroutine is ready. + // It's also blocking on unix.Read. + // Now we try to swap the file descriptor under its nose. + w.Close() + w, err = NewWatcher() + defer w.Close() + if err != nil { + t.Fatalf("Failed to create second watcher: %v", err) + } + + <-time.After(50 * time.Millisecond) + err = w.Add(testDir) + if err != nil { + t.Fatalf("Error adding testDir again: %v", err) + } +} + +// This test verifies the watcher can keep up with file creations/deletions +// when under load. +func TestInotifyStress(t *testing.T) { + maxNumToCreate := 1000 + + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + testFilePrefix := filepath.Join(testDir, "testfile") + + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher: %v", err) + } + defer w.Close() + + err = w.Add(testDir) + if err != nil { + t.Fatalf("Failed to add testDir: %v", err) + } + + doneChan := make(chan struct{}) + // The buffer ensures that the file generation goroutine is never blocked. + errChan := make(chan error, 2*maxNumToCreate) + + go func() { + for i := 0; i < maxNumToCreate; i++ { + testFile := fmt.Sprintf("%s%d", testFilePrefix, i) + + handle, err := os.Create(testFile) + if err != nil { + errChan <- fmt.Errorf("Create failed: %v", err) + continue + } + + err = handle.Close() + if err != nil { + errChan <- fmt.Errorf("Close failed: %v", err) + continue + } + } + + // If we delete a newly created file too quickly, inotify will skip the + // create event and only send the delete event. 
+ time.Sleep(100 * time.Millisecond) + + for i := 0; i < maxNumToCreate; i++ { + testFile := fmt.Sprintf("%s%d", testFilePrefix, i) + err = os.Remove(testFile) + if err != nil { + errChan <- fmt.Errorf("Remove failed: %v", err) + } + } + + close(doneChan) + }() + + creates := 0 + removes := 0 + + finished := false + after := time.After(10 * time.Second) + for !finished { + select { + case <-after: + t.Fatalf("Not done") + case <-doneChan: + finished = true + case err := <-errChan: + t.Fatalf("Got an error from file creator goroutine: %v", err) + case err := <-w.Errors: + t.Fatalf("Got an error from watcher: %v", err) + case evt := <-w.Events: + if !strings.HasPrefix(evt.Name, testFilePrefix) { + t.Fatalf("Got an event for an unknown file: %s", evt.Name) + } + if evt.Op == Create { + creates++ + } + if evt.Op == Remove { + removes++ + } + } + } + + // Drain remaining events from channels + count := 0 + for count < 10 { + select { + case err := <-errChan: + t.Fatalf("Got an error from file creator goroutine: %v", err) + case err := <-w.Errors: + t.Fatalf("Got an error from watcher: %v", err) + case evt := <-w.Events: + if !strings.HasPrefix(evt.Name, testFilePrefix) { + t.Fatalf("Got an event for an unknown file: %s", evt.Name) + } + if evt.Op == Create { + creates++ + } + if evt.Op == Remove { + removes++ + } + count = 0 + default: + count++ + // Give the watcher chances to fill the channels. + time.Sleep(time.Millisecond) + } + } + + if creates-removes > 1 || creates-removes < -1 { + t.Fatalf("Creates and removes should not be off by more than one: %d creates, %d removes", creates, removes) + } + if creates < 50 { + t.Fatalf("Expected at least 50 creates, got %d", creates) + } +} + +func TestInotifyRemoveTwice(t *testing.T) { + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + testFile := filepath.Join(testDir, "testfile") + + handle, err := os.Create(testFile) + if err != nil { + t.Fatalf("Create failed: %v", err) + } + handle.Close() + + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher: %v", err) + } + defer w.Close() + + err = w.Add(testFile) + if err != nil { + t.Fatalf("Failed to add testFile: %v", err) + } + + err = os.Remove(testFile) + if err != nil { + t.Fatalf("Failed to remove testFile: %v", err) + } + + err = w.Remove(testFile) + if err == nil { + t.Fatalf("no error on removing invalid file") + } + s1 := fmt.Sprintf("%s", err) + + err = w.Remove(testFile) + if err == nil { + t.Fatalf("no error on removing invalid file") + } + s2 := fmt.Sprintf("%s", err) + + if s1 != s2 { + t.Fatalf("receive different error - %s / %s", s1, s2) + } +} + +func TestInotifyInnerMapLength(t *testing.T) { + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + testFile := filepath.Join(testDir, "testfile") + + handle, err := os.Create(testFile) + if err != nil { + t.Fatalf("Create failed: %v", err) + } + handle.Close() + + w, err := NewWatcher() + if err != nil { + t.Fatalf("Failed to create watcher: %v", err) + } + defer w.Close() + + err = w.Add(testFile) + if err != nil { + t.Fatalf("Failed to add testFile: %v", err) + } + go func() { + for err := range w.Errors { + t.Fatalf("error received: %s", err) + } + }() + + err = os.Remove(testFile) + if err != nil { + t.Fatalf("Failed to remove testFile: %v", err) + } + _ = <-w.Events // consume Remove event + <-time.After(50 * time.Millisecond) // wait IN_IGNORE propagated + + w.mu.Lock() + defer w.mu.Unlock() + if len(w.watches) != 0 { + t.Fatalf("Expected watches len is 0, but got: %d, %v", len(w.watches), 
w.watches) + } + if len(w.paths) != 0 { + t.Fatalf("Expected paths len is 0, but got: %d, %v", len(w.paths), w.paths) + } +} diff --git a/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go b/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go new file mode 100644 index 000000000..5564554f7 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/integration_darwin_test.go @@ -0,0 +1,147 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fsnotify + +import ( + "os" + "path/filepath" + "testing" + "time" + + "golang.org/x/sys/unix" +) + +// testExchangedataForWatcher tests the watcher with the exchangedata operation on OS X. +// +// This is widely used for atomic saves on OS X, e.g. TextMate and in Apple's NSDocument. +// +// See https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/exchangedata.2.html +// Also see: https://github.com/textmate/textmate/blob/cd016be29489eba5f3c09b7b70b06da134dda550/Frameworks/io/src/swap_file_data.cc#L20 +func testExchangedataForWatcher(t *testing.T, watchDir bool) { + // Create directory to watch + testDir1 := tempMkdir(t) + + // For the intermediate file + testDir2 := tempMkdir(t) + + defer os.RemoveAll(testDir1) + defer os.RemoveAll(testDir2) + + resolvedFilename := "TestFsnotifyEvents.file" + + // TextMate does: + // + // 1. exchangedata (intermediate, resolved) + // 2. unlink intermediate + // + // Let's try to simulate that: + resolved := filepath.Join(testDir1, resolvedFilename) + intermediate := filepath.Join(testDir2, resolvedFilename+"~") + + // Make sure we create the file before we start watching + createAndSyncFile(t, resolved) + + watcher := newWatcher(t) + + // Test both variants in isolation + if watchDir { + addWatch(t, watcher, testDir1) + } else { + addWatch(t, watcher, resolved) + } + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var removeReceived counter + var createReceived counter + + done := make(chan bool) + + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(resolved) { + if event.Op&Remove == Remove { + removeReceived.increment() + } + if event.Op&Create == Create { + createReceived.increment() + } + } + t.Logf("event received: %s", event) + } + done <- true + }() + + // Repeat to make sure the watched file/directory "survives" the REMOVE/CREATE loop. + for i := 1; i <= 3; i++ { + // The intermediate file is created in a folder outside the watcher + createAndSyncFile(t, intermediate) + + // 1. Swap + if err := unix.Exchangedata(intermediate, resolved, 0); err != nil { + t.Fatalf("[%d] exchangedata failed: %s", i, err) + } + + time.Sleep(50 * time.Millisecond) + + // 2. Delete the intermediate file + err := os.Remove(intermediate) + + if err != nil { + t.Fatalf("[%d] remove %s failed: %s", i, intermediate, err) + } + + time.Sleep(50 * time.Millisecond) + + } + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + + // The events will be (CHMOD + REMOVE + CREATE) X 2. 
Let's focus on the last two:
+	if removeReceived.value() < 3 {
+		t.Fatal("fsnotify remove events have not been received after 500 ms")
+	}
+
+	if createReceived.value() < 3 {
+		t.Fatal("fsnotify create events have not been received after 500 ms")
+	}
+
+	watcher.Close()
+	t.Log("waiting for the event channel to become closed...")
+	select {
+	case <-done:
+		t.Log("event channel closed")
+	case <-time.After(2 * time.Second):
+		t.Fatal("event stream was not closed after 2 seconds")
+	}
+}
+
+// TestExchangedataInWatchedDir tests the exchangedata operation on a file in a watched dir.
+func TestExchangedataInWatchedDir(t *testing.T) {
+	testExchangedataForWatcher(t, true)
+}
+
+// TestExchangedataInWatchedFile tests the exchangedata operation on a watched file.
+func TestExchangedataInWatchedFile(t *testing.T) {
+	testExchangedataForWatcher(t, false)
+}
+
+func createAndSyncFile(t *testing.T, filepath string) {
+	f1, err := os.OpenFile(filepath, os.O_WRONLY|os.O_CREATE, 0666)
+	if err != nil {
+		t.Fatalf("creating %s failed: %s", filepath, err)
+	}
+	f1.Sync()
+	f1.Close()
+}
diff --git a/vendor/github.com/fsnotify/fsnotify/integration_test.go b/vendor/github.com/fsnotify/fsnotify/integration_test.go
new file mode 100644
index 000000000..8b7e9d3ec
--- /dev/null
+++ b/vendor/github.com/fsnotify/fsnotify/integration_test.go
@@ -0,0 +1,1237 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !plan9,!solaris
+
+package fsnotify
+
+import (
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path"
+	"path/filepath"
+	"runtime"
+	"sync/atomic"
+	"testing"
+	"time"
+)
+
+// An atomic counter
+type counter struct {
+	val int32
+}
+
+func (c *counter) increment() {
+	atomic.AddInt32(&c.val, 1)
+}
+
+func (c *counter) value() int32 {
+	return atomic.LoadInt32(&c.val)
+}
+
+func (c *counter) reset() {
+	atomic.StoreInt32(&c.val, 0)
+}
+
+// tempMkdir makes a temporary directory
+func tempMkdir(t *testing.T) string {
+	dir, err := ioutil.TempDir("", "fsnotify")
+	if err != nil {
+		t.Fatalf("failed to create test directory: %s", err)
+	}
+	return dir
+}
+
+// tempMkFile makes a temporary file.
+func tempMkFile(t *testing.T, dir string) string {
+	f, err := ioutil.TempFile(dir, "fsnotify")
+	if err != nil {
+		t.Fatalf("failed to create test file: %v", err)
+	}
+	defer f.Close()
+	return f.Name()
+}
+
+// newWatcher initializes an fsnotify Watcher instance.
+func newWatcher(t *testing.T) *Watcher { + watcher, err := NewWatcher() + if err != nil { + t.Fatalf("NewWatcher() failed: %s", err) + } + return watcher +} + +// addWatch adds a watch for a directory +func addWatch(t *testing.T, watcher *Watcher, dir string) { + if err := watcher.Add(dir); err != nil { + t.Fatalf("watcher.Add(%q) failed: %s", dir, err) + } +} + +func TestFsnotifyMultipleOperations(t *testing.T) { + watcher := newWatcher(t) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create directory that's not watched + testDirToMoveFiles := tempMkdir(t) + defer os.RemoveAll(testDirToMoveFiles) + + testFile := filepath.Join(testDir, "TestFsnotifySeq.testfile") + testFileRenamed := filepath.Join(testDirToMoveFiles, "TestFsnotifySeqRename.testfile") + + addWatch(t, watcher, testDir) + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var createReceived, modifyReceived, deleteReceived, renameReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFile) { + t.Logf("event received: %s", event) + if event.Op&Remove == Remove { + deleteReceived.increment() + } + if event.Op&Write == Write { + modifyReceived.increment() + } + if event.Op&Create == Create { + createReceived.increment() + } + if event.Op&Rename == Rename { + renameReceived.increment() + } + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err := os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + time.Sleep(time.Millisecond) + f.WriteString("data") + f.Sync() + f.Close() + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + if err := testRename(testFile, testFileRenamed); err != nil { + t.Fatalf("rename failed: %s", err) + } + + // Modify the file outside of the watched dir + f, err = os.Open(testFileRenamed) + if err != nil { + t.Fatalf("open test renamed file failed: %s", err) + } + f.WriteString("data") + f.Sync() + f.Close() + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + // Recreate the file that was moved + f, err = os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Close() + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + cReceived := createReceived.value() + if cReceived != 2 { + t.Fatalf("incorrect number of create events received after 500 ms (%d vs %d)", cReceived, 2) + } + mReceived := modifyReceived.value() + if mReceived != 1 { + t.Fatalf("incorrect number of modify events received after 500 ms (%d vs %d)", mReceived, 1) + } + dReceived := deleteReceived.value() + rReceived := renameReceived.value() + if dReceived+rReceived != 1 { + t.Fatalf("incorrect number of rename+delete events received after 500 ms (%d vs %d)", rReceived+dReceived, 1) + 
} + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } +} + +func TestFsnotifyMultipleCreates(t *testing.T) { + watcher := newWatcher(t) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + testFile := filepath.Join(testDir, "TestFsnotifySeq.testfile") + + addWatch(t, watcher, testDir) + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var createReceived, modifyReceived, deleteReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFile) { + t.Logf("event received: %s", event) + if event.Op&Remove == Remove { + deleteReceived.increment() + } + if event.Op&Create == Create { + createReceived.increment() + } + if event.Op&Write == Write { + modifyReceived.increment() + } + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err := os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + time.Sleep(time.Millisecond) + f.WriteString("data") + f.Sync() + f.Close() + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + os.Remove(testFile) + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + // Recreate the file + f, err = os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Close() + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + // Modify + f, err = os.OpenFile(testFile, os.O_WRONLY, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + time.Sleep(time.Millisecond) + f.WriteString("data") + f.Sync() + f.Close() + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + // Modify + f, err = os.OpenFile(testFile, os.O_WRONLY, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + time.Sleep(time.Millisecond) + f.WriteString("data") + f.Sync() + f.Close() + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + cReceived := createReceived.value() + if cReceived != 2 { + t.Fatalf("incorrect number of create events received after 500 ms (%d vs %d)", cReceived, 2) + } + mReceived := modifyReceived.value() + if mReceived < 3 { + t.Fatalf("incorrect number of modify events received after 500 ms (%d vs atleast %d)", mReceived, 3) + } + dReceived := deleteReceived.value() + if dReceived != 1 { + t.Fatalf("incorrect number of rename+delete events received after 500 ms (%d vs %d)", dReceived, 1) + } + + // Try closing the fsnotify instance + t.Log("calling 
Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } +} + +func TestFsnotifyDirOnly(t *testing.T) { + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create a file before watching directory + // This should NOT add any events to the fsnotify event queue + testFileAlreadyExists := filepath.Join(testDir, "TestFsnotifyEventsExisting.testfile") + { + var f *os.File + f, err := os.OpenFile(testFileAlreadyExists, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + } + + addWatch(t, watcher, testDir) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + testFile := filepath.Join(testDir, "TestFsnotifyDirOnly.testfile") + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var createReceived, modifyReceived, deleteReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFile) || event.Name == filepath.Clean(testFileAlreadyExists) { + t.Logf("event received: %s", event) + if event.Op&Remove == Remove { + deleteReceived.increment() + } + if event.Op&Write == Write { + modifyReceived.increment() + } + if event.Op&Create == Create { + createReceived.increment() + } + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err := os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + time.Sleep(time.Millisecond) + f.WriteString("data") + f.Sync() + f.Close() + + time.Sleep(50 * time.Millisecond) // give system time to sync write change before delete + + os.Remove(testFile) + os.Remove(testFileAlreadyExists) + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + cReceived := createReceived.value() + if cReceived != 1 { + t.Fatalf("incorrect number of create events received after 500 ms (%d vs %d)", cReceived, 1) + } + mReceived := modifyReceived.value() + if mReceived != 1 { + t.Fatalf("incorrect number of modify events received after 500 ms (%d vs %d)", mReceived, 1) + } + dReceived := deleteReceived.value() + if dReceived != 2 { + t.Fatalf("incorrect number of delete events received after 500 ms (%d vs %d)", dReceived, 2) + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } +} + +func TestFsnotifyDeleteWatchedDir(t *testing.T) { + watcher := newWatcher(t) + defer watcher.Close() + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create a file before watching directory + testFileAlreadyExists := filepath.Join(testDir, "TestFsnotifyEventsExisting.testfile") + { + var f 
*os.File + f, err := os.OpenFile(testFileAlreadyExists, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + } + + addWatch(t, watcher, testDir) + + // Add a watch for testFile + addWatch(t, watcher, testFileAlreadyExists) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var deleteReceived counter + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFileAlreadyExists) { + t.Logf("event received: %s", event) + if event.Op&Remove == Remove { + deleteReceived.increment() + } + } else { + t.Logf("unexpected event received: %s", event) + } + } + }() + + os.RemoveAll(testDir) + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + dReceived := deleteReceived.value() + if dReceived < 2 { + t.Fatalf("did not receive at least %d delete events, received %d after 500 ms", 2, dReceived) + } +} + +func TestFsnotifySubDir(t *testing.T) { + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + testFile1 := filepath.Join(testDir, "TestFsnotifyFile1.testfile") + testSubDir := filepath.Join(testDir, "sub") + testSubDirFile := filepath.Join(testDir, "sub/TestFsnotifyFile1.testfile") + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var createReceived, deleteReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testSubDir) || event.Name == filepath.Clean(testFile1) { + t.Logf("event received: %s", event) + if event.Op&Create == Create { + createReceived.increment() + } + if event.Op&Remove == Remove { + deleteReceived.increment() + } + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + addWatch(t, watcher, testDir) + + // Create sub-directory + if err := os.Mkdir(testSubDir, 0777); err != nil { + t.Fatalf("failed to create test sub-directory: %s", err) + } + + // Create a file + var f *os.File + f, err := os.OpenFile(testFile1, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + + // Create a file (Should not see this! 
we are not watching subdir) + var fs *os.File + fs, err = os.OpenFile(testSubDirFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + fs.Sync() + fs.Close() + + time.Sleep(200 * time.Millisecond) + + // Make sure receive deletes for both file and sub-directory + os.RemoveAll(testSubDir) + os.Remove(testFile1) + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + cReceived := createReceived.value() + if cReceived != 2 { + t.Fatalf("incorrect number of create events received after 500 ms (%d vs %d)", cReceived, 2) + } + dReceived := deleteReceived.value() + if dReceived != 2 { + t.Fatalf("incorrect number of delete events received after 500 ms (%d vs %d)", dReceived, 2) + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } +} + +func TestFsnotifyRename(t *testing.T) { + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + addWatch(t, watcher, testDir) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + testFile := filepath.Join(testDir, "TestFsnotifyEvents.testfile") + testFileRenamed := filepath.Join(testDir, "TestFsnotifyEvents.testfileRenamed") + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var renameReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFile) || event.Name == filepath.Clean(testFileRenamed) { + if event.Op&Rename == Rename { + renameReceived.increment() + } + t.Logf("event received: %s", event) + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err := os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + f.WriteString("data") + f.Sync() + f.Close() + + // Add a watch for testFile + addWatch(t, watcher, testFile) + + if err := testRename(testFile, testFileRenamed); err != nil { + t.Fatalf("rename failed: %s", err) + } + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + if renameReceived.value() == 0 { + t.Fatal("fsnotify rename events have not been received after 500 ms") + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } + + os.Remove(testFileRenamed) +} + +func TestFsnotifyRenameToCreate(t *testing.T) { + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create directory to get file + testDirFrom := tempMkdir(t) + defer os.RemoveAll(testDirFrom) + + addWatch(t, watcher, 
testDir) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + testFile := filepath.Join(testDirFrom, "TestFsnotifyEvents.testfile") + testFileRenamed := filepath.Join(testDir, "TestFsnotifyEvents.testfileRenamed") + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var createReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFile) || event.Name == filepath.Clean(testFileRenamed) { + if event.Op&Create == Create { + createReceived.increment() + } + t.Logf("event received: %s", event) + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err := os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + + if err := testRename(testFile, testFileRenamed); err != nil { + t.Fatalf("rename failed: %s", err) + } + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + if createReceived.value() == 0 { + t.Fatal("fsnotify create events have not been received after 500 ms") + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } + + os.Remove(testFileRenamed) +} + +func TestFsnotifyRenameToOverwrite(t *testing.T) { + switch runtime.GOOS { + case "plan9", "windows": + t.Skipf("skipping test on %q (os.Rename over existing file does not create event).", runtime.GOOS) + } + + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create directory to get file + testDirFrom := tempMkdir(t) + defer os.RemoveAll(testDirFrom) + + testFile := filepath.Join(testDirFrom, "TestFsnotifyEvents.testfile") + testFileRenamed := filepath.Join(testDir, "TestFsnotifyEvents.testfileRenamed") + + // Create a file + var fr *os.File + fr, err := os.OpenFile(testFileRenamed, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + fr.Sync() + fr.Close() + + addWatch(t, watcher, testDir) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + var eventReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testFileRenamed) { + eventReceived.increment() + t.Logf("event received: %s", event) + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err = os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + + if 
err := testRename(testFile, testFileRenamed); err != nil { + t.Fatalf("rename failed: %s", err) + } + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + if eventReceived.value() == 0 { + t.Fatal("fsnotify events have not been received after 500 ms") + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(2 * time.Second): + t.Fatal("event stream was not closed after 2 seconds") + } + + os.Remove(testFileRenamed) +} + +func TestRemovalOfWatch(t *testing.T) { + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create a file before watching directory + testFileAlreadyExists := filepath.Join(testDir, "TestFsnotifyEventsExisting.testfile") + { + var f *os.File + f, err := os.OpenFile(testFileAlreadyExists, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + } + + watcher := newWatcher(t) + defer watcher.Close() + + addWatch(t, watcher, testDir) + if err := watcher.Remove(testDir); err != nil { + t.Fatalf("Could not remove the watch: %v\n", err) + } + + go func() { + select { + case ev := <-watcher.Events: + t.Fatalf("We received event: %v\n", ev) + case <-time.After(500 * time.Millisecond): + t.Log("No event received, as expected.") + } + }() + + time.Sleep(200 * time.Millisecond) + // Modify the file outside of the watched dir + f, err := os.Open(testFileAlreadyExists) + if err != nil { + t.Fatalf("Open test file failed: %s", err) + } + f.WriteString("data") + f.Sync() + f.Close() + if err := os.Chmod(testFileAlreadyExists, 0700); err != nil { + t.Fatalf("chmod failed: %s", err) + } + time.Sleep(400 * time.Millisecond) +} + +func TestFsnotifyAttrib(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("attributes don't work on Windows.") + } + + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Receive errors on the error channel on a separate goroutine + go func() { + for err := range watcher.Errors { + t.Fatalf("error received: %s", err) + } + }() + + testFile := filepath.Join(testDir, "TestFsnotifyAttrib.testfile") + + // Receive events on the event channel on a separate goroutine + eventstream := watcher.Events + // The modifyReceived counter counts IsModify events that are not IsAttrib, + // and the attribReceived counts IsAttrib events (which are also IsModify as + // a consequence). 
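+	// (In terms of this package's Op values, IsModify corresponds to Write and
+	// IsAttrib to Chmod, which is what the handler below actually checks.)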
+ var modifyReceived counter + var attribReceived counter + done := make(chan bool) + go func() { + for event := range eventstream { + // Only count relevant events + if event.Name == filepath.Clean(testDir) || event.Name == filepath.Clean(testFile) { + if event.Op&Write == Write { + modifyReceived.increment() + } + if event.Op&Chmod == Chmod { + attribReceived.increment() + } + t.Logf("event received: %s", event) + } else { + t.Logf("unexpected event received: %s", event) + } + } + done <- true + }() + + // Create a file + // This should add at least one event to the fsnotify event queue + var f *os.File + f, err := os.OpenFile(testFile, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + + f.WriteString("data") + f.Sync() + f.Close() + + // Add a watch for testFile + addWatch(t, watcher, testFile) + + if err := os.Chmod(testFile, 0700); err != nil { + t.Fatalf("chmod failed: %s", err) + } + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + // Creating/writing a file changes also the mtime, so IsAttrib should be set to true here + time.Sleep(500 * time.Millisecond) + if modifyReceived.value() != 0 { + t.Fatal("received an unexpected modify event when creating a test file") + } + if attribReceived.value() == 0 { + t.Fatal("fsnotify attribute events have not received after 500 ms") + } + + // Modifying the contents of the file does not set the attrib flag (although eg. the mtime + // might have been modified). + modifyReceived.reset() + attribReceived.reset() + + f, err = os.OpenFile(testFile, os.O_WRONLY, 0) + if err != nil { + t.Fatalf("reopening test file failed: %s", err) + } + + f.WriteString("more data") + f.Sync() + f.Close() + + time.Sleep(500 * time.Millisecond) + + if modifyReceived.value() != 1 { + t.Fatal("didn't receive a modify event after changing test file contents") + } + + if attribReceived.value() != 0 { + t.Fatal("did receive an unexpected attrib event after changing test file contents") + } + + modifyReceived.reset() + attribReceived.reset() + + // Doing a chmod on the file should trigger an event with the "attrib" flag set (the contents + // of the file are not changed though) + if err := os.Chmod(testFile, 0600); err != nil { + t.Fatalf("chmod failed: %s", err) + } + + time.Sleep(500 * time.Millisecond) + + if attribReceived.value() != 1 { + t.Fatal("didn't receive an attribute change after 500ms") + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() + t.Log("waiting for the event channel to become closed...") + select { + case <-done: + t.Log("event channel closed") + case <-time.After(1e9): + t.Fatal("event stream was not closed after 1 second") + } + + os.Remove(testFile) +} + +func TestFsnotifyClose(t *testing.T) { + watcher := newWatcher(t) + watcher.Close() + + var done int32 + go func() { + watcher.Close() + atomic.StoreInt32(&done, 1) + }() + + time.Sleep(50e6) // 50 ms + if atomic.LoadInt32(&done) == 0 { + t.Fatal("double Close() test failed: second Close() call didn't return") + } + + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + if err := watcher.Add(testDir); err == nil { + t.Fatal("expected error on Watch() after Close(), got nil") + } +} + +func TestFsnotifyFakeSymlink(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("symlinks don't work on Windows.") + } + + watcher := newWatcher(t) + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + var 
errorsReceived counter + // Receive errors on the error channel on a separate goroutine + go func() { + for errors := range watcher.Errors { + t.Logf("Received error: %s", errors) + errorsReceived.increment() + } + }() + + // Count the CREATE events received + var createEventsReceived, otherEventsReceived counter + go func() { + for ev := range watcher.Events { + t.Logf("event received: %s", ev) + if ev.Op&Create == Create { + createEventsReceived.increment() + } else { + otherEventsReceived.increment() + } + } + }() + + addWatch(t, watcher, testDir) + + if err := os.Symlink(filepath.Join(testDir, "zzz"), filepath.Join(testDir, "zzznew")); err != nil { + t.Fatalf("Failed to create bogus symlink: %s", err) + } + t.Logf("Created bogus symlink") + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + + // Should not be error, just no events for broken links (watching nothing) + if errorsReceived.value() > 0 { + t.Fatal("fsnotify errors have been received.") + } + if otherEventsReceived.value() > 0 { + t.Fatal("fsnotify other events received on the broken link") + } + + // Except for 1 create event (for the link itself) + if createEventsReceived.value() == 0 { + t.Fatal("fsnotify create events were not received after 500 ms") + } + if createEventsReceived.value() > 1 { + t.Fatal("fsnotify more create events received than expected") + } + + // Try closing the fsnotify instance + t.Log("calling Close()") + watcher.Close() +} + +func TestCyclicSymlink(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("symlinks don't work on Windows.") + } + + watcher := newWatcher(t) + + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + link := path.Join(testDir, "link") + if err := os.Symlink(".", link); err != nil { + t.Fatalf("could not make symlink: %v", err) + } + addWatch(t, watcher, testDir) + + var createEventsReceived counter + go func() { + for ev := range watcher.Events { + if ev.Op&Create == Create { + createEventsReceived.increment() + } + } + }() + + if err := os.Remove(link); err != nil { + t.Fatalf("Error removing link: %v", err) + } + + // It would be nice to be able to expect a delete event here, but kqueue has + // no way for us to get events on symlinks themselves, because opening them + // opens an fd to the file to which they point. + + if err := ioutil.WriteFile(link, []byte("foo"), 0700); err != nil { + t.Fatalf("could not make symlink: %v", err) + } + + // We expect this event to be received almost immediately, but let's wait 500 ms to be sure + time.Sleep(500 * time.Millisecond) + + if got := createEventsReceived.value(); got == 0 { + t.Errorf("want at least 1 create event got %v", got) + } + + watcher.Close() +} + +// TestConcurrentRemovalOfWatch tests that concurrent calls to RemoveWatch do not race. 
+// See https://codereview.appspot.com/103300045/ +// go test -test.run=TestConcurrentRemovalOfWatch -test.cpu=1,1,1,1,1 -race +func TestConcurrentRemovalOfWatch(t *testing.T) { + if runtime.GOOS != "darwin" { + t.Skip("regression test for race only present on darwin") + } + + // Create directory to watch + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + // Create a file before watching directory + testFileAlreadyExists := filepath.Join(testDir, "TestFsnotifyEventsExisting.testfile") + { + var f *os.File + f, err := os.OpenFile(testFileAlreadyExists, os.O_WRONLY|os.O_CREATE, 0666) + if err != nil { + t.Fatalf("creating test file failed: %s", err) + } + f.Sync() + f.Close() + } + + watcher := newWatcher(t) + defer watcher.Close() + + addWatch(t, watcher, testDir) + + // Test that RemoveWatch can be invoked concurrently, with no data races. + removed1 := make(chan struct{}) + go func() { + defer close(removed1) + watcher.Remove(testDir) + }() + removed2 := make(chan struct{}) + go func() { + close(removed2) + watcher.Remove(testDir) + }() + <-removed1 + <-removed2 +} + +func TestClose(t *testing.T) { + // Regression test for #59 bad file descriptor from Close + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + watcher := newWatcher(t) + if err := watcher.Add(testDir); err != nil { + t.Fatalf("Expected no error on Add, got %v", err) + } + err := watcher.Close() + if err != nil { + t.Fatalf("Expected no error on Close, got %v.", err) + } +} + +// TestRemoveWithClose tests if one can handle Remove events and, at the same +// time, close Watcher object without any data races. +func TestRemoveWithClose(t *testing.T) { + testDir := tempMkdir(t) + defer os.RemoveAll(testDir) + + const fileN = 200 + tempFiles := make([]string, 0, fileN) + for i := 0; i < fileN; i++ { + tempFiles = append(tempFiles, tempMkFile(t, testDir)) + } + watcher := newWatcher(t) + if err := watcher.Add(testDir); err != nil { + t.Fatalf("Expected no error on Add, got %v", err) + } + startC, stopC := make(chan struct{}), make(chan struct{}) + errC := make(chan error) + go func() { + for { + select { + case <-watcher.Errors: + case <-watcher.Events: + case <-stopC: + return + } + } + }() + go func() { + <-startC + for _, fileName := range tempFiles { + os.Remove(fileName) + } + }() + go func() { + <-startC + errC <- watcher.Close() + }() + close(startC) + defer close(stopC) + if err := <-errC; err != nil { + t.Fatalf("Expected no error on Close, got %v.", err) + } +} + +func testRename(file1, file2 string) error { + switch runtime.GOOS { + case "windows", "plan9": + return os.Rename(file1, file2) + default: + cmd := exec.Command("mv", file1, file2) + return cmd.Run() + } +} diff --git a/vendor/github.com/fsnotify/fsnotify/kqueue.go b/vendor/github.com/fsnotify/fsnotify/kqueue.go new file mode 100644 index 000000000..c2b4acb18 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/kqueue.go @@ -0,0 +1,503 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build freebsd openbsd netbsd dragonfly darwin + +package fsnotify + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sync" + "time" + + "golang.org/x/sys/unix" +) + +// Watcher watches a set of files, delivering events to a channel. 
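The counter used throughout these tests (modifyReceived, attribReceived, errorsReceived, and so on) is defined earlier in the test file and is not shown in this hunk. A minimal sketch of such a helper, assuming only that increment, value and reset must be safe to call from both the event-reading goroutine and the test goroutine, could be built on sync/atomic:

// Sketch only: a goroutine-safe counter with the methods the tests above call.
type counter struct {
	val int32
}

func (c *counter) increment()   { atomic.AddInt32(&c.val, 1) }
func (c *counter) value() int32 { return atomic.LoadInt32(&c.val) }
func (c *counter) reset()       { atomic.StoreInt32(&c.val, 0) }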
+type Watcher struct { + Events chan Event + Errors chan error + done chan bool // Channel for sending a "quit message" to the reader goroutine + + kq int // File descriptor (as returned by the kqueue() syscall). + + mu sync.Mutex // Protects access to watcher data + watches map[string]int // Map of watched file descriptors (key: path). + externalWatches map[string]bool // Map of watches added by user of the library. + dirFlags map[string]uint32 // Map of watched directories to fflags used in kqueue. + paths map[int]pathInfo // Map file descriptors to path names for processing kqueue events. + fileExists map[string]bool // Keep track of if we know this file exists (to stop duplicate create events). + isClosed bool // Set to true when Close() is first called +} + +type pathInfo struct { + name string + isDir bool +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + kq, err := kqueue() + if err != nil { + return nil, err + } + + w := &Watcher{ + kq: kq, + watches: make(map[string]int), + dirFlags: make(map[string]uint32), + paths: make(map[int]pathInfo), + fileExists: make(map[string]bool), + externalWatches: make(map[string]bool), + Events: make(chan Event), + Errors: make(chan error), + done: make(chan bool), + } + + go w.readEvents() + return w, nil +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + w.mu.Lock() + if w.isClosed { + w.mu.Unlock() + return nil + } + w.isClosed = true + w.mu.Unlock() + + // copy paths to remove while locked + w.mu.Lock() + var pathsToRemove = make([]string, 0, len(w.watches)) + for name := range w.watches { + pathsToRemove = append(pathsToRemove, name) + } + w.mu.Unlock() + // unlock before calling Remove, which also locks + + var err error + for _, name := range pathsToRemove { + if e := w.Remove(name); e != nil && err == nil { + err = e + } + } + + // Send "quit" message to the reader goroutine: + w.done <- true + + return nil +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + w.mu.Lock() + w.externalWatches[name] = true + w.mu.Unlock() + _, err := w.addWatch(name, noteAllEvents) + return err +} + +// Remove stops watching the the named file or directory (non-recursively). +func (w *Watcher) Remove(name string) error { + name = filepath.Clean(name) + w.mu.Lock() + watchfd, ok := w.watches[name] + w.mu.Unlock() + if !ok { + return fmt.Errorf("can't remove non-existent kevent watch for: %s", name) + } + + const registerRemove = unix.EV_DELETE + if err := register(w.kq, []int{watchfd}, registerRemove, 0); err != nil { + return err + } + + unix.Close(watchfd) + + w.mu.Lock() + isDir := w.paths[watchfd].isDir + delete(w.watches, name) + delete(w.paths, watchfd) + delete(w.dirFlags, name) + w.mu.Unlock() + + // Find all watched paths that are in this directory that are not external. + if isDir { + var pathsToRemove []string + w.mu.Lock() + for _, path := range w.paths { + wdir, _ := filepath.Split(path.name) + if filepath.Clean(wdir) == name { + if !w.externalWatches[path.name] { + pathsToRemove = append(pathsToRemove, path.name) + } + } + } + w.mu.Unlock() + for _, name := range pathsToRemove { + // Since these are internal, not much sense in propagating error + // to the user, as that will just confuse them with an error about + // a path they did not explicitly watch themselves. 
+ w.Remove(name) + } + } + + return nil +} + +// Watch all events (except NOTE_EXTEND, NOTE_LINK, NOTE_REVOKE) +const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | unix.NOTE_RENAME + +// keventWaitTime to block on each read from kevent +var keventWaitTime = durationToTimespec(100 * time.Millisecond) + +// addWatch adds name to the watched file set. +// The flags are interpreted as described in kevent(2). +// Returns the real path to the file which was added, if any, which may be different from the one passed in the case of symlinks. +func (w *Watcher) addWatch(name string, flags uint32) (string, error) { + var isDir bool + // Make ./name and name equivalent + name = filepath.Clean(name) + + w.mu.Lock() + if w.isClosed { + w.mu.Unlock() + return "", errors.New("kevent instance already closed") + } + watchfd, alreadyWatching := w.watches[name] + // We already have a watch, but we can still override flags. + if alreadyWatching { + isDir = w.paths[watchfd].isDir + } + w.mu.Unlock() + + if !alreadyWatching { + fi, err := os.Lstat(name) + if err != nil { + return "", err + } + + // Don't watch sockets. + if fi.Mode()&os.ModeSocket == os.ModeSocket { + return "", nil + } + + // Don't watch named pipes. + if fi.Mode()&os.ModeNamedPipe == os.ModeNamedPipe { + return "", nil + } + + // Follow Symlinks + // Unfortunately, Linux can add bogus symlinks to watch list without + // issue, and Windows can't do symlinks period (AFAIK). To maintain + // consistency, we will act like everything is fine. There will simply + // be no file events for broken symlinks. + // Hence the returns of nil on errors. + if fi.Mode()&os.ModeSymlink == os.ModeSymlink { + name, err = filepath.EvalSymlinks(name) + if err != nil { + return "", nil + } + + w.mu.Lock() + _, alreadyWatching = w.watches[name] + w.mu.Unlock() + + if alreadyWatching { + return name, nil + } + + fi, err = os.Lstat(name) + if err != nil { + return "", nil + } + } + + watchfd, err = unix.Open(name, openMode, 0700) + if watchfd == -1 { + return "", err + } + + isDir = fi.IsDir() + } + + const registerAdd = unix.EV_ADD | unix.EV_CLEAR | unix.EV_ENABLE + if err := register(w.kq, []int{watchfd}, registerAdd, flags); err != nil { + unix.Close(watchfd) + return "", err + } + + if !alreadyWatching { + w.mu.Lock() + w.watches[name] = watchfd + w.paths[watchfd] = pathInfo{name: name, isDir: isDir} + w.mu.Unlock() + } + + if isDir { + // Watch the directory if it has not been watched before, + // or if it was watched before, but perhaps only a NOTE_DELETE (watchDirectoryFiles) + w.mu.Lock() + + watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE && + (!alreadyWatching || (w.dirFlags[name]&unix.NOTE_WRITE) != unix.NOTE_WRITE) + // Store flags so this watch can be updated later + w.dirFlags[name] = flags + w.mu.Unlock() + + if watchDir { + if err := w.watchDirectoryFiles(name); err != nil { + return "", err + } + } + } + return name, nil +} + +// readEvents reads from kqueue and converts the received kevents into +// Event values that it sends down the Events channel. +func (w *Watcher) readEvents() { + eventBuffer := make([]unix.Kevent_t, 10) + + for { + // See if there is a message on the "done" channel + select { + case <-w.done: + err := unix.Close(w.kq) + if err != nil { + w.Errors <- err + } + close(w.Events) + close(w.Errors) + return + default: + } + + // Get new events + kevents, err := read(w.kq, eventBuffer, &keventWaitTime) + // EINTR is okay, the syscall was interrupted before timeout expired. 
+ if err != nil && err != unix.EINTR { + w.Errors <- err + continue + } + + // Flush the events we received to the Events channel + for len(kevents) > 0 { + kevent := &kevents[0] + watchfd := int(kevent.Ident) + mask := uint32(kevent.Fflags) + w.mu.Lock() + path := w.paths[watchfd] + w.mu.Unlock() + event := newEvent(path.name, mask) + + if path.isDir && !(event.Op&Remove == Remove) { + // Double check to make sure the directory exists. This can happen when + // we do a rm -fr on a recursively watched folders and we receive a + // modification event first but the folder has been deleted and later + // receive the delete event + if _, err := os.Lstat(event.Name); os.IsNotExist(err) { + // mark is as delete event + event.Op |= Remove + } + } + + if event.Op&Rename == Rename || event.Op&Remove == Remove { + w.Remove(event.Name) + w.mu.Lock() + delete(w.fileExists, event.Name) + w.mu.Unlock() + } + + if path.isDir && event.Op&Write == Write && !(event.Op&Remove == Remove) { + w.sendDirectoryChangeEvents(event.Name) + } else { + // Send the event on the Events channel + w.Events <- event + } + + if event.Op&Remove == Remove { + // Look for a file that may have overwritten this. + // For example, mv f1 f2 will delete f2, then create f2. + if path.isDir { + fileDir := filepath.Clean(event.Name) + w.mu.Lock() + _, found := w.watches[fileDir] + w.mu.Unlock() + if found { + // make sure the directory exists before we watch for changes. When we + // do a recursive watch and perform rm -fr, the parent directory might + // have gone missing, ignore the missing directory and let the + // upcoming delete event remove the watch from the parent directory. + if _, err := os.Lstat(fileDir); err == nil { + w.sendDirectoryChangeEvents(fileDir) + } + } + } else { + filePath := filepath.Clean(event.Name) + if fileInfo, err := os.Lstat(filePath); err == nil { + w.sendFileCreatedEventIfNew(filePath, fileInfo) + } + } + } + + // Move to next event + kevents = kevents[1:] + } + } +} + +// newEvent returns an platform-independent Event based on kqueue Fflags. +func newEvent(name string, mask uint32) Event { + e := Event{Name: name} + if mask&unix.NOTE_DELETE == unix.NOTE_DELETE { + e.Op |= Remove + } + if mask&unix.NOTE_WRITE == unix.NOTE_WRITE { + e.Op |= Write + } + if mask&unix.NOTE_RENAME == unix.NOTE_RENAME { + e.Op |= Rename + } + if mask&unix.NOTE_ATTRIB == unix.NOTE_ATTRIB { + e.Op |= Chmod + } + return e +} + +func newCreateEvent(name string) Event { + return Event{Name: name, Op: Create} +} + +// watchDirectoryFiles to mimic inotify when adding a watch on a directory +func (w *Watcher) watchDirectoryFiles(dirPath string) error { + // Get all files + files, err := ioutil.ReadDir(dirPath) + if err != nil { + return err + } + + for _, fileInfo := range files { + filePath := filepath.Join(dirPath, fileInfo.Name()) + filePath, err = w.internalWatch(filePath, fileInfo) + if err != nil { + return err + } + + w.mu.Lock() + w.fileExists[filePath] = true + w.mu.Unlock() + } + + return nil +} + +// sendDirectoryEvents searches the directory for newly created files +// and sends them over the event channel. This functionality is to have +// the BSD version of fsnotify match Linux inotify which provides a +// create event for files created in a watched directory. 
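The directory-rescan functions that follow (sendDirectoryChangeEvents and sendFileCreatedEventIfNew) synthesize inotify-style Create events by comparing a fresh directory listing against the fileExists set. Reduced to a standalone sketch, with newNames as a hypothetical name rather than anything in the vendored file, the core bookkeeping is:

// Report each name once, the first time it shows up in a listing.
func newNames(seen map[string]bool, listing []string) []string {
	var created []string
	for _, name := range listing {
		if !seen[name] {
			created = append(created, name)
			seen[name] = true
		}
	}
	return created
}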
+func (w *Watcher) sendDirectoryChangeEvents(dirPath string) { + // Get all files + files, err := ioutil.ReadDir(dirPath) + if err != nil { + w.Errors <- err + } + + // Search for new files + for _, fileInfo := range files { + filePath := filepath.Join(dirPath, fileInfo.Name()) + err := w.sendFileCreatedEventIfNew(filePath, fileInfo) + + if err != nil { + return + } + } +} + +// sendFileCreatedEvent sends a create event if the file isn't already being tracked. +func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fileInfo os.FileInfo) (err error) { + w.mu.Lock() + _, doesExist := w.fileExists[filePath] + w.mu.Unlock() + if !doesExist { + // Send create event + w.Events <- newCreateEvent(filePath) + } + + // like watchDirectoryFiles (but without doing another ReadDir) + filePath, err = w.internalWatch(filePath, fileInfo) + if err != nil { + return err + } + + w.mu.Lock() + w.fileExists[filePath] = true + w.mu.Unlock() + + return nil +} + +func (w *Watcher) internalWatch(name string, fileInfo os.FileInfo) (string, error) { + if fileInfo.IsDir() { + // mimic Linux providing delete events for subdirectories + // but preserve the flags used if currently watching subdirectory + w.mu.Lock() + flags := w.dirFlags[name] + w.mu.Unlock() + + flags |= unix.NOTE_DELETE | unix.NOTE_RENAME + return w.addWatch(name, flags) + } + + // watch file to mimic Linux inotify + return w.addWatch(name, noteAllEvents) +} + +// kqueue creates a new kernel event queue and returns a descriptor. +func kqueue() (kq int, err error) { + kq, err = unix.Kqueue() + if kq == -1 { + return kq, err + } + return kq, nil +} + +// register events with the queue +func register(kq int, fds []int, flags int, fflags uint32) error { + changes := make([]unix.Kevent_t, len(fds)) + + for i, fd := range fds { + // SetKevent converts int to the platform-specific types: + unix.SetKevent(&changes[i], fd, unix.EVFILT_VNODE, flags) + changes[i].Fflags = fflags + } + + // register the events + success, err := unix.Kevent(kq, changes, nil, nil) + if success == -1 { + return err + } + return nil +} + +// read retrieves pending events, or waits until an event occurs. +// A timeout of nil blocks indefinitely, while 0 polls the queue. +func read(kq int, events []unix.Kevent_t, timeout *unix.Timespec) ([]unix.Kevent_t, error) { + n, err := unix.Kevent(kq, nil, events, timeout) + if err != nil { + return nil, err + } + return events[0:n], nil +} + +// durationToTimespec prepares a timeout value +func durationToTimespec(d time.Duration) unix.Timespec { + return unix.NsecToTimespec(d.Nanoseconds()) +} diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go b/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go new file mode 100644 index 000000000..7d8de1451 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go @@ -0,0 +1,11 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build freebsd openbsd netbsd dragonfly + +package fsnotify + +import "golang.org/x/sys/unix" + +const openMode = unix.O_NONBLOCK | unix.O_RDONLY diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go b/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go new file mode 100644 index 000000000..9139e1716 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go @@ -0,0 +1,12 @@ +// Copyright 2013 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build darwin + +package fsnotify + +import "golang.org/x/sys/unix" + +// note: this constant is not defined on BSD +const openMode = unix.O_EVTONLY diff --git a/vendor/github.com/fsnotify/fsnotify/windows.go b/vendor/github.com/fsnotify/fsnotify/windows.go new file mode 100644 index 000000000..09436f31d --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/windows.go @@ -0,0 +1,561 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build windows + +package fsnotify + +import ( + "errors" + "fmt" + "os" + "path/filepath" + "runtime" + "sync" + "syscall" + "unsafe" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error + isClosed bool // Set to true when Close() is first called + mu sync.Mutex // Map access + port syscall.Handle // Handle to completion port + watches watchMap // Map of watches (key: i-number) + input chan *input // Inputs to the reader are sent on this channel + quit chan chan<- error +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + port, e := syscall.CreateIoCompletionPort(syscall.InvalidHandle, 0, 0, 0) + if e != nil { + return nil, os.NewSyscallError("CreateIoCompletionPort", e) + } + w := &Watcher{ + port: port, + watches: make(watchMap), + input: make(chan *input, 1), + Events: make(chan Event, 50), + Errors: make(chan error), + quit: make(chan chan<- error, 1), + } + go w.readEvents() + return w, nil +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + if w.isClosed { + return nil + } + w.isClosed = true + + // Send "quit" message to the reader goroutine + ch := make(chan error) + w.quit <- ch + if err := w.wakeupReader(); err != nil { + return err + } + return <-ch +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + if w.isClosed { + return errors.New("watcher already closed") + } + in := &input{ + op: opAddWatch, + path: filepath.Clean(name), + flags: sysFSALLEVENTS, + reply: make(chan error), + } + w.input <- in + if err := w.wakeupReader(); err != nil { + return err + } + return <-in.reply +} + +// Remove stops watching the the named file or directory (non-recursively). 
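Both Add above and Remove below hand an *input carrying a reply channel to the I/O goroutine and block on the answer, so all watch mutation happens on the thread that owns the completion port. A hedged sketch of that request/reply pattern, with hypothetical names rather than the ones used here:

type request struct {
	path  string
	reply chan error
}

// submit hands a request to the owning goroutine and waits for its verdict.
func submit(requests chan<- *request, path string) error {
	req := &request{path: path, reply: make(chan error)}
	requests <- req
	return <-req.reply
}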
+func (w *Watcher) Remove(name string) error { + in := &input{ + op: opRemoveWatch, + path: filepath.Clean(name), + reply: make(chan error), + } + w.input <- in + if err := w.wakeupReader(); err != nil { + return err + } + return <-in.reply +} + +const ( + // Options for AddWatch + sysFSONESHOT = 0x80000000 + sysFSONLYDIR = 0x1000000 + + // Events + sysFSACCESS = 0x1 + sysFSALLEVENTS = 0xfff + sysFSATTRIB = 0x4 + sysFSCLOSE = 0x18 + sysFSCREATE = 0x100 + sysFSDELETE = 0x200 + sysFSDELETESELF = 0x400 + sysFSMODIFY = 0x2 + sysFSMOVE = 0xc0 + sysFSMOVEDFROM = 0x40 + sysFSMOVEDTO = 0x80 + sysFSMOVESELF = 0x800 + + // Special events + sysFSIGNORED = 0x8000 + sysFSQOVERFLOW = 0x4000 +) + +func newEvent(name string, mask uint32) Event { + e := Event{Name: name} + if mask&sysFSCREATE == sysFSCREATE || mask&sysFSMOVEDTO == sysFSMOVEDTO { + e.Op |= Create + } + if mask&sysFSDELETE == sysFSDELETE || mask&sysFSDELETESELF == sysFSDELETESELF { + e.Op |= Remove + } + if mask&sysFSMODIFY == sysFSMODIFY { + e.Op |= Write + } + if mask&sysFSMOVE == sysFSMOVE || mask&sysFSMOVESELF == sysFSMOVESELF || mask&sysFSMOVEDFROM == sysFSMOVEDFROM { + e.Op |= Rename + } + if mask&sysFSATTRIB == sysFSATTRIB { + e.Op |= Chmod + } + return e +} + +const ( + opAddWatch = iota + opRemoveWatch +) + +const ( + provisional uint64 = 1 << (32 + iota) +) + +type input struct { + op int + path string + flags uint32 + reply chan error +} + +type inode struct { + handle syscall.Handle + volume uint32 + index uint64 +} + +type watch struct { + ov syscall.Overlapped + ino *inode // i-number + path string // Directory path + mask uint64 // Directory itself is being watched with these notify flags + names map[string]uint64 // Map of names being watched and their notify flags + rename string // Remembers the old name while renaming a file + buf [4096]byte +} + +type indexMap map[uint64]*watch +type watchMap map[uint32]indexMap + +func (w *Watcher) wakeupReader() error { + e := syscall.PostQueuedCompletionStatus(w.port, 0, 0, nil) + if e != nil { + return os.NewSyscallError("PostQueuedCompletionStatus", e) + } + return nil +} + +func getDir(pathname string) (dir string, err error) { + attr, e := syscall.GetFileAttributes(syscall.StringToUTF16Ptr(pathname)) + if e != nil { + return "", os.NewSyscallError("GetFileAttributes", e) + } + if attr&syscall.FILE_ATTRIBUTE_DIRECTORY != 0 { + dir = pathname + } else { + dir, _ = filepath.Split(pathname) + dir = filepath.Clean(dir) + } + return +} + +func getIno(path string) (ino *inode, err error) { + h, e := syscall.CreateFile(syscall.StringToUTF16Ptr(path), + syscall.FILE_LIST_DIRECTORY, + syscall.FILE_SHARE_READ|syscall.FILE_SHARE_WRITE|syscall.FILE_SHARE_DELETE, + nil, syscall.OPEN_EXISTING, + syscall.FILE_FLAG_BACKUP_SEMANTICS|syscall.FILE_FLAG_OVERLAPPED, 0) + if e != nil { + return nil, os.NewSyscallError("CreateFile", e) + } + var fi syscall.ByHandleFileInformation + if e = syscall.GetFileInformationByHandle(h, &fi); e != nil { + syscall.CloseHandle(h) + return nil, os.NewSyscallError("GetFileInformationByHandle", e) + } + ino = &inode{ + handle: h, + volume: fi.VolumeSerialNumber, + index: uint64(fi.FileIndexHigh)<<32 | uint64(fi.FileIndexLow), + } + return ino, nil +} + +// Must run within the I/O thread. +func (m watchMap) get(ino *inode) *watch { + if i := m[ino.volume]; i != nil { + return i[ino.index] + } + return nil +} + +// Must run within the I/O thread. 
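watchMap keys watches by volume serial number and 64-bit file index, the closest thing Windows offers to an inode, so the same directory reached through different path spellings resolves to a single watch entry. An illustrative pairing of get with the set defined just below (not part of the vendored file):

func exampleWatchMapUsage() {
	m := make(watchMap)
	ino := &inode{volume: 0xCAFE, index: 42}
	if m.get(ino) == nil {
		m.set(ino, &watch{ino: ino, path: `C:\watched`, names: make(map[string]uint64)})
	}
	_ = m.get(ino) // now returns the watch stored above
}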
+func (m watchMap) set(ino *inode, watch *watch) { + i := m[ino.volume] + if i == nil { + i = make(indexMap) + m[ino.volume] = i + } + i[ino.index] = watch +} + +// Must run within the I/O thread. +func (w *Watcher) addWatch(pathname string, flags uint64) error { + dir, err := getDir(pathname) + if err != nil { + return err + } + if flags&sysFSONLYDIR != 0 && pathname != dir { + return nil + } + ino, err := getIno(dir) + if err != nil { + return err + } + w.mu.Lock() + watchEntry := w.watches.get(ino) + w.mu.Unlock() + if watchEntry == nil { + if _, e := syscall.CreateIoCompletionPort(ino.handle, w.port, 0, 0); e != nil { + syscall.CloseHandle(ino.handle) + return os.NewSyscallError("CreateIoCompletionPort", e) + } + watchEntry = &watch{ + ino: ino, + path: dir, + names: make(map[string]uint64), + } + w.mu.Lock() + w.watches.set(ino, watchEntry) + w.mu.Unlock() + flags |= provisional + } else { + syscall.CloseHandle(ino.handle) + } + if pathname == dir { + watchEntry.mask |= flags + } else { + watchEntry.names[filepath.Base(pathname)] |= flags + } + if err = w.startRead(watchEntry); err != nil { + return err + } + if pathname == dir { + watchEntry.mask &= ^provisional + } else { + watchEntry.names[filepath.Base(pathname)] &= ^provisional + } + return nil +} + +// Must run within the I/O thread. +func (w *Watcher) remWatch(pathname string) error { + dir, err := getDir(pathname) + if err != nil { + return err + } + ino, err := getIno(dir) + if err != nil { + return err + } + w.mu.Lock() + watch := w.watches.get(ino) + w.mu.Unlock() + if watch == nil { + return fmt.Errorf("can't remove non-existent watch for: %s", pathname) + } + if pathname == dir { + w.sendEvent(watch.path, watch.mask&sysFSIGNORED) + watch.mask = 0 + } else { + name := filepath.Base(pathname) + w.sendEvent(filepath.Join(watch.path, name), watch.names[name]&sysFSIGNORED) + delete(watch.names, name) + } + return w.startRead(watch) +} + +// Must run within the I/O thread. +func (w *Watcher) deleteWatch(watch *watch) { + for name, mask := range watch.names { + if mask&provisional == 0 { + w.sendEvent(filepath.Join(watch.path, name), mask&sysFSIGNORED) + } + delete(watch.names, name) + } + if watch.mask != 0 { + if watch.mask&provisional == 0 { + w.sendEvent(watch.path, watch.mask&sysFSIGNORED) + } + watch.mask = 0 + } +} + +// Must run within the I/O thread. 
+func (w *Watcher) startRead(watch *watch) error { + if e := syscall.CancelIo(watch.ino.handle); e != nil { + w.Errors <- os.NewSyscallError("CancelIo", e) + w.deleteWatch(watch) + } + mask := toWindowsFlags(watch.mask) + for _, m := range watch.names { + mask |= toWindowsFlags(m) + } + if mask == 0 { + if e := syscall.CloseHandle(watch.ino.handle); e != nil { + w.Errors <- os.NewSyscallError("CloseHandle", e) + } + w.mu.Lock() + delete(w.watches[watch.ino.volume], watch.ino.index) + w.mu.Unlock() + return nil + } + e := syscall.ReadDirectoryChanges(watch.ino.handle, &watch.buf[0], + uint32(unsafe.Sizeof(watch.buf)), false, mask, nil, &watch.ov, 0) + if e != nil { + err := os.NewSyscallError("ReadDirectoryChanges", e) + if e == syscall.ERROR_ACCESS_DENIED && watch.mask&provisional == 0 { + // Watched directory was probably removed + if w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) { + if watch.mask&sysFSONESHOT != 0 { + watch.mask = 0 + } + } + err = nil + } + w.deleteWatch(watch) + w.startRead(watch) + return err + } + return nil +} + +// readEvents reads from the I/O completion port, converts the +// received events into Event objects and sends them via the Events channel. +// Entry point to the I/O thread. +func (w *Watcher) readEvents() { + var ( + n, key uint32 + ov *syscall.Overlapped + ) + runtime.LockOSThread() + + for { + e := syscall.GetQueuedCompletionStatus(w.port, &n, &key, &ov, syscall.INFINITE) + watch := (*watch)(unsafe.Pointer(ov)) + + if watch == nil { + select { + case ch := <-w.quit: + w.mu.Lock() + var indexes []indexMap + for _, index := range w.watches { + indexes = append(indexes, index) + } + w.mu.Unlock() + for _, index := range indexes { + for _, watch := range index { + w.deleteWatch(watch) + w.startRead(watch) + } + } + var err error + if e := syscall.CloseHandle(w.port); e != nil { + err = os.NewSyscallError("CloseHandle", e) + } + close(w.Events) + close(w.Errors) + ch <- err + return + case in := <-w.input: + switch in.op { + case opAddWatch: + in.reply <- w.addWatch(in.path, uint64(in.flags)) + case opRemoveWatch: + in.reply <- w.remWatch(in.path) + } + default: + } + continue + } + + switch e { + case syscall.ERROR_MORE_DATA: + if watch == nil { + w.Errors <- errors.New("ERROR_MORE_DATA has unexpectedly null lpOverlapped buffer") + } else { + // The i/o succeeded but the buffer is full. + // In theory we should be building up a full packet. + // In practice we can get away with just carrying on. 
+ n = uint32(unsafe.Sizeof(watch.buf)) + } + case syscall.ERROR_ACCESS_DENIED: + // Watched directory was probably removed + w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) + w.deleteWatch(watch) + w.startRead(watch) + continue + case syscall.ERROR_OPERATION_ABORTED: + // CancelIo was called on this handle + continue + default: + w.Errors <- os.NewSyscallError("GetQueuedCompletionPort", e) + continue + case nil: + } + + var offset uint32 + for { + if n == 0 { + w.Events <- newEvent("", sysFSQOVERFLOW) + w.Errors <- errors.New("short read in readEvents()") + break + } + + // Point "raw" to the event in the buffer + raw := (*syscall.FileNotifyInformation)(unsafe.Pointer(&watch.buf[offset])) + buf := (*[syscall.MAX_PATH]uint16)(unsafe.Pointer(&raw.FileName)) + name := syscall.UTF16ToString(buf[:raw.FileNameLength/2]) + fullname := filepath.Join(watch.path, name) + + var mask uint64 + switch raw.Action { + case syscall.FILE_ACTION_REMOVED: + mask = sysFSDELETESELF + case syscall.FILE_ACTION_MODIFIED: + mask = sysFSMODIFY + case syscall.FILE_ACTION_RENAMED_OLD_NAME: + watch.rename = name + case syscall.FILE_ACTION_RENAMED_NEW_NAME: + if watch.names[watch.rename] != 0 { + watch.names[name] |= watch.names[watch.rename] + delete(watch.names, watch.rename) + mask = sysFSMOVESELF + } + } + + sendNameEvent := func() { + if w.sendEvent(fullname, watch.names[name]&mask) { + if watch.names[name]&sysFSONESHOT != 0 { + delete(watch.names, name) + } + } + } + if raw.Action != syscall.FILE_ACTION_RENAMED_NEW_NAME { + sendNameEvent() + } + if raw.Action == syscall.FILE_ACTION_REMOVED { + w.sendEvent(fullname, watch.names[name]&sysFSIGNORED) + delete(watch.names, name) + } + if w.sendEvent(fullname, watch.mask&toFSnotifyFlags(raw.Action)) { + if watch.mask&sysFSONESHOT != 0 { + watch.mask = 0 + } + } + if raw.Action == syscall.FILE_ACTION_RENAMED_NEW_NAME { + fullname = filepath.Join(watch.path, watch.rename) + sendNameEvent() + } + + // Move to the next event in the buffer + if raw.NextEntryOffset == 0 { + break + } + offset += raw.NextEntryOffset + + // Error! 
+ if offset >= n { + w.Errors <- errors.New("Windows system assumed buffer larger than it is, events have likely been missed.") + break + } + } + + if err := w.startRead(watch); err != nil { + w.Errors <- err + } + } +} + +func (w *Watcher) sendEvent(name string, mask uint64) bool { + if mask == 0 { + return false + } + event := newEvent(name, uint32(mask)) + select { + case ch := <-w.quit: + w.quit <- ch + case w.Events <- event: + } + return true +} + +func toWindowsFlags(mask uint64) uint32 { + var m uint32 + if mask&sysFSACCESS != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_LAST_ACCESS + } + if mask&sysFSMODIFY != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_LAST_WRITE + } + if mask&sysFSATTRIB != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_ATTRIBUTES + } + if mask&(sysFSMOVE|sysFSCREATE|sysFSDELETE) != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_FILE_NAME | syscall.FILE_NOTIFY_CHANGE_DIR_NAME + } + return m +} + +func toFSnotifyFlags(action uint32) uint64 { + switch action { + case syscall.FILE_ACTION_ADDED: + return sysFSCREATE + case syscall.FILE_ACTION_REMOVED: + return sysFSDELETE + case syscall.FILE_ACTION_MODIFIED: + return sysFSMODIFY + case syscall.FILE_ACTION_RENAMED_OLD_NAME: + return sysFSMOVEDFROM + case syscall.FILE_ACTION_RENAMED_NEW_NAME: + return sysFSMOVEDTO + } + return 0 +} diff --git a/vendor/github.com/ghodss/yaml/.gitignore b/vendor/github.com/ghodss/yaml/.gitignore new file mode 100644 index 000000000..e256a31e0 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/.gitignore @@ -0,0 +1,20 @@ +# OSX leaves these everywhere on SMB shares +._* + +# Eclipse files +.classpath +.project +.settings/** + +# Emacs save files +*~ + +# Vim-related files +[._]*.s[a-w][a-z] +[._]s[a-w][a-z] +*.un~ +Session.vim +.netrwhist + +# Go test binaries +*.test diff --git a/vendor/github.com/ghodss/yaml/.travis.yml b/vendor/github.com/ghodss/yaml/.travis.yml new file mode 100644 index 000000000..0e9d6edc0 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/.travis.yml @@ -0,0 +1,7 @@ +language: go +go: + - 1.3 + - 1.4 +script: + - go test + - go build diff --git a/vendor/github.com/ghodss/yaml/LICENSE b/vendor/github.com/ghodss/yaml/LICENSE new file mode 100644 index 000000000..7805d36de --- /dev/null +++ b/vendor/github.com/ghodss/yaml/LICENSE @@ -0,0 +1,50 @@ +The MIT License (MIT) + +Copyright (c) 2014 Sam Ghods + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +Copyright (c) 2012 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/ghodss/yaml/README.md b/vendor/github.com/ghodss/yaml/README.md new file mode 100644 index 000000000..0200f75b4 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/README.md @@ -0,0 +1,121 @@ +# YAML marshaling and unmarshaling support for Go + +[![Build Status](https://travis-ci.org/ghodss/yaml.svg)](https://travis-ci.org/ghodss/yaml) + +## Introduction + +A wrapper around [go-yaml](https://github.com/go-yaml/yaml) designed to enable a better way of handling YAML when marshaling to and from structs. + +In short, this library first converts YAML to JSON using go-yaml and then uses `json.Marshal` and `json.Unmarshal` to convert to or from the struct. This means that it effectively reuses the JSON struct tags as well as the custom JSON methods `MarshalJSON` and `UnmarshalJSON` unlike go-yaml. For a detailed overview of the rationale behind this method, [see this blog post](http://ghodss.com/2014/the-right-way-to-handle-yaml-in-golang/). + +## Compatibility + +This package uses [go-yaml](https://github.com/go-yaml/yaml) and therefore supports [everything go-yaml supports](https://github.com/go-yaml/yaml#compatibility). + +## Caveats + +**Caveat #1:** When using `yaml.Marshal` and `yaml.Unmarshal`, binary data should NOT be preceded with the `!!binary` YAML tag. If you do, go-yaml will convert the binary data from base64 to native binary data, which is not compatible with JSON. You can still use binary in your YAML files though - just store them without the `!!binary` tag and decode the base64 in your code (e.g. in the custom JSON methods `MarshalJSON` and `UnmarshalJSON`). This also has the benefit that your YAML and your JSON binary data will be decoded exactly the same way. As an example: + +``` +BAD: + exampleKey: !!binary gIGC + +GOOD: + exampleKey: gIGC +... and decode the base64 data in your code. +``` + +**Caveat #2:** When using `YAMLToJSON` directly, maps with keys that are maps will result in an error since this is not supported by JSON. 
This error will occur in `Unmarshal` as well since you can't unmarshal map keys anyways since struct fields can't be keys. + +## Installation and usage + +To install, run: + +``` +$ go get github.com/ghodss/yaml +``` + +And import using: + +``` +import "github.com/ghodss/yaml" +``` + +Usage is very similar to the JSON library: + +```go +package main + +import ( + "fmt" + + "github.com/ghodss/yaml" +) + +type Person struct { + Name string `json:"name"` // Affects YAML field names too. + Age int `json:"age"` +} + +func main() { + // Marshal a Person struct to YAML. + p := Person{"John", 30} + y, err := yaml.Marshal(p) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(string(y)) + /* Output: + age: 30 + name: John + */ + + // Unmarshal the YAML back into a Person struct. + var p2 Person + err = yaml.Unmarshal(y, &p2) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(p2) + /* Output: + {John 30} + */ +} +``` + +`yaml.YAMLToJSON` and `yaml.JSONToYAML` methods are also available: + +```go +package main + +import ( + "fmt" + + "github.com/ghodss/yaml" +) + +func main() { + j := []byte(`{"name": "John", "age": 30}`) + y, err := yaml.JSONToYAML(j) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(string(y)) + /* Output: + name: John + age: 30 + */ + j2, err := yaml.YAMLToJSON(y) + if err != nil { + fmt.Printf("err: %v\n", err) + return + } + fmt.Println(string(j2)) + /* Output: + {"age":30,"name":"John"} + */ +} +``` diff --git a/vendor/github.com/ghodss/yaml/fields.go b/vendor/github.com/ghodss/yaml/fields.go new file mode 100644 index 000000000..586007402 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/fields.go @@ -0,0 +1,501 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +package yaml + +import ( + "bytes" + "encoding" + "encoding/json" + "reflect" + "sort" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +// indirect walks down v allocating pointers as needed, +// until it gets to a non-pointer. +// if it encounters an Unmarshaler, indirect stops and returns that. +// if decodingNull is true, indirect stops at the last pointer so it can be set to nil. +func indirect(v reflect.Value, decodingNull bool) (json.Unmarshaler, encoding.TextUnmarshaler, reflect.Value) { + // If v is a named type and is addressable, + // start with its address, so that if the type has pointer methods, + // we find them. + if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() { + v = v.Addr() + } + for { + // Load value from interface, but only if the result will be + // usefully addressable. + if v.Kind() == reflect.Interface && !v.IsNil() { + e := v.Elem() + if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) { + v = e + continue + } + } + + if v.Kind() != reflect.Ptr { + break + } + + if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() { + break + } + if v.IsNil() { + if v.CanSet() { + v.Set(reflect.New(v.Type().Elem())) + } else { + v = reflect.New(v.Type().Elem()) + } + } + if v.Type().NumMethod() > 0 { + if u, ok := v.Interface().(json.Unmarshaler); ok { + return u, nil, reflect.Value{} + } + if u, ok := v.Interface().(encoding.TextUnmarshaler); ok { + return nil, u, reflect.Value{} + } + } + v = v.Elem() + } + return nil, nil, v +} + +// A field represents a single field found in a struct. 
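fields.go below reimplements encoding/json's field resolution (tags, fold functions, embedding rules) so that YAML keys can be matched against json struct tags, including case-insensitively. A small sketch of the visible effect, assuming the exported yaml.Unmarshal defined later in this diff:

package main

import (
	"fmt"

	"github.com/ghodss/yaml"
)

type Person struct {
	Name string `json:"name"`
}

func main() {
	var p Person
	// "NAME" is matched against the json tag "name" case-insensitively,
	// first during the YAML-to-JSON conversion and again by encoding/json.
	if err := yaml.Unmarshal([]byte("NAME: John"), &p); err != nil {
		fmt.Printf("err: %v\n", err)
		return
	}
	fmt.Println(p.Name) // John
}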
+type field struct { + name string + nameBytes []byte // []byte(name) + equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent + + tag bool + index []int + typ reflect.Type + omitEmpty bool + quoted bool +} + +func fillField(f field) field { + f.nameBytes = []byte(f.name) + f.equalFold = foldFunc(f.nameBytes) + return f +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from json tag", then +// breaking ties with index sequence. +type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that JSON should recognize for the given type. +// The algorithm is breadth-first search over the set of structs to include - the top struct +// and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + count := map[reflect.Type]int{} + nextCount := map[reflect.Type]int{} + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" { // unexported + continue + } + tag := sf.Tag.Get("json") + if tag == "-" { + continue + } + name, opts := parseTag(tag) + if !isValidTag(name) { + name = "" + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := name != "" + if name == "" { + name = sf.Name + } + fields = append(fields, fillField(field{ + name: name, + tag: tagged, + index: index, + typ: ft, + omitEmpty: opts.Contains("omitempty"), + quoted: opts.Contains("string"), + })) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. 
+ nextCount[ft]++ + if nextCount[ft] == 1 { + next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft})) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with JSON tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// JSON tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. + length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. + if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. +func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. + default: + if !unicode.IsLetter(c) && !unicode.IsDigit(c) { + return false + } + } + } + return true +} + +const ( + caseMask = ^byte(0x20) // Mask to ignore case in ASCII. 
+ kelvin = '\u212a' + smallLongEss = '\u017f' +) + +// foldFunc returns one of four different case folding equivalence +// functions, from most general (and slow) to fastest: +// +// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8 +// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S') +// 3) asciiEqualFold, no special, but includes non-letters (including _) +// 4) simpleLetterEqualFold, no specials, no non-letters. +// +// The letters S and K are special because they map to 3 runes, not just 2: +// * S maps to s and to U+017F 'ſ' Latin small letter long s +// * k maps to K and to U+212A 'K' Kelvin sign +// See http://play.golang.org/p/tTxjOc0OGo +// +// The returned function is specialized for matching against s and +// should only be given s. It's not curried for performance reasons. +func foldFunc(s []byte) func(s, t []byte) bool { + nonLetter := false + special := false // special letter + for _, b := range s { + if b >= utf8.RuneSelf { + return bytes.EqualFold + } + upper := b & caseMask + if upper < 'A' || upper > 'Z' { + nonLetter = true + } else if upper == 'K' || upper == 'S' { + // See above for why these letters are special. + special = true + } + } + if special { + return equalFoldRight + } + if nonLetter { + return asciiEqualFold + } + return simpleLetterEqualFold +} + +// equalFoldRight is a specialization of bytes.EqualFold when s is +// known to be all ASCII (including punctuation), but contains an 's', +// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t. +// See comments on foldFunc. +func equalFoldRight(s, t []byte) bool { + for _, sb := range s { + if len(t) == 0 { + return false + } + tb := t[0] + if tb < utf8.RuneSelf { + if sb != tb { + sbUpper := sb & caseMask + if 'A' <= sbUpper && sbUpper <= 'Z' { + if sbUpper != tb&caseMask { + return false + } + } else { + return false + } + } + t = t[1:] + continue + } + // sb is ASCII and t is not. t must be either kelvin + // sign or long s; sb must be s, S, k, or K. + tr, size := utf8.DecodeRune(t) + switch sb { + case 's', 'S': + if tr != smallLongEss { + return false + } + case 'k', 'K': + if tr != kelvin { + return false + } + default: + return false + } + t = t[size:] + + } + if len(t) > 0 { + return false + } + return true +} + +// asciiEqualFold is a specialization of bytes.EqualFold for use when +// s is all ASCII (but may contain non-letters) and contains no +// special-folding letters. +// See comments on foldFunc. +func asciiEqualFold(s, t []byte) bool { + if len(s) != len(t) { + return false + } + for i, sb := range s { + tb := t[i] + if sb == tb { + continue + } + if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') { + if sb&caseMask != tb&caseMask { + return false + } + } else { + return false + } + } + return true +} + +// simpleLetterEqualFold is a specialization of bytes.EqualFold for +// use when s is all ASCII letters (no underscores, etc) and also +// doesn't contain 'k', 'K', 's', or 'S'. +// See comments on foldFunc. +func simpleLetterEqualFold(s, t []byte) bool { + if len(s) != len(t) { + return false + } + for i, b := range s { + if b&caseMask != t[i]&caseMask { + return false + } + } + return true +} + +// tagOptions is the string following a comma in a struct field's "json" +// tag, or the empty string. It does not include the leading comma. +type tagOptions string + +// parseTag splits a struct field's json tag into its name and +// comma-separated options. 
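Concretely, for a tag of the shape used in the README example above, parseTag and tagOptions.Contains below behave as follows (exampleParseTag is only an illustration, not part of the file):

func exampleParseTag() {
	name, opts := parseTag("spec,omitempty")
	_ = name                       // "spec"
	_ = opts.Contains("omitempty") // true
	_ = opts.Contains("string")    // false
}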
+func parseTag(tag string) (string, tagOptions) { + if idx := strings.Index(tag, ","); idx != -1 { + return tag[:idx], tagOptions(tag[idx+1:]) + } + return tag, tagOptions("") +} + +// Contains reports whether a comma-separated list of options +// contains a particular substr flag. substr must be surrounded by a +// string boundary or commas. +func (o tagOptions) Contains(optionName string) bool { + if len(o) == 0 { + return false + } + s := string(o) + for s != "" { + var next string + i := strings.Index(s, ",") + if i >= 0 { + s, next = s[:i], s[i+1:] + } + if s == optionName { + return true + } + s = next + } + return false +} diff --git a/vendor/github.com/ghodss/yaml/yaml.go b/vendor/github.com/ghodss/yaml/yaml.go new file mode 100644 index 000000000..4fb4054a8 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/yaml.go @@ -0,0 +1,277 @@ +package yaml + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "strconv" + + "gopkg.in/yaml.v2" +) + +// Marshals the object into JSON then converts JSON to YAML and returns the +// YAML. +func Marshal(o interface{}) ([]byte, error) { + j, err := json.Marshal(o) + if err != nil { + return nil, fmt.Errorf("error marshaling into JSON: %v", err) + } + + y, err := JSONToYAML(j) + if err != nil { + return nil, fmt.Errorf("error converting JSON to YAML: %v", err) + } + + return y, nil +} + +// Converts YAML to JSON then uses JSON to unmarshal into an object. +func Unmarshal(y []byte, o interface{}) error { + vo := reflect.ValueOf(o) + j, err := yamlToJSON(y, &vo) + if err != nil { + return fmt.Errorf("error converting YAML to JSON: %v", err) + } + + err = json.Unmarshal(j, o) + if err != nil { + return fmt.Errorf("error unmarshaling JSON: %v", err) + } + + return nil +} + +// Convert JSON to YAML. +func JSONToYAML(j []byte) ([]byte, error) { + // Convert the JSON to an object. + var jsonObj interface{} + // We are using yaml.Unmarshal here (instead of json.Unmarshal) because the + // Go JSON library doesn't try to pick the right number type (int, float, + // etc.) when unmarshalling to interface{}, it just picks float64 + // universally. go-yaml does go through the effort of picking the right + // number type, so we can preserve number type throughout this process. + err := yaml.Unmarshal(j, &jsonObj) + if err != nil { + return nil, err + } + + // Marshal this object into YAML. + return yaml.Marshal(jsonObj) +} + +// Convert YAML to JSON. Since JSON is a subset of YAML, passing JSON through +// this method should be a no-op. +// +// Things YAML can do that are not supported by JSON: +// * In YAML you can have binary and null keys in your maps. These are invalid +// in JSON. (int and float keys are converted to strings.) +// * Binary data in YAML with the !!binary tag is not supported. If you want to +// use binary data with this library, encode the data as base64 as usual but do +// not use the !!binary tag in your YAML. This will ensure the original base64 +// encoded data makes it all the way through to the JSON. +func YAMLToJSON(y []byte) ([]byte, error) { + return yamlToJSON(y, nil) +} + +func yamlToJSON(y []byte, jsonTarget *reflect.Value) ([]byte, error) { + // Convert the YAML to an object. + var yamlObj interface{} + err := yaml.Unmarshal(y, &yamlObj) + if err != nil { + return nil, err + } + + // YAML objects are not completely compatible with JSON objects (e.g. you + // can have non-string keys in YAML). 
So, convert the YAML-compatible object + // to a JSON-compatible object, failing with an error if irrecoverable + // incompatibilties happen along the way. + jsonObj, err := convertToJSONableObject(yamlObj, jsonTarget) + if err != nil { + return nil, err + } + + // Convert this object to JSON and return the data. + return json.Marshal(jsonObj) +} + +func convertToJSONableObject(yamlObj interface{}, jsonTarget *reflect.Value) (interface{}, error) { + var err error + + // Resolve jsonTarget to a concrete value (i.e. not a pointer or an + // interface). We pass decodingNull as false because we're not actually + // decoding into the value, we're just checking if the ultimate target is a + // string. + if jsonTarget != nil { + ju, tu, pv := indirect(*jsonTarget, false) + // We have a JSON or Text Umarshaler at this level, so we can't be trying + // to decode into a string. + if ju != nil || tu != nil { + jsonTarget = nil + } else { + jsonTarget = &pv + } + } + + // If yamlObj is a number or a boolean, check if jsonTarget is a string - + // if so, coerce. Else return normal. + // If yamlObj is a map or array, find the field that each key is + // unmarshaling to, and when you recurse pass the reflect.Value for that + // field back into this function. + switch typedYAMLObj := yamlObj.(type) { + case map[interface{}]interface{}: + // JSON does not support arbitrary keys in a map, so we must convert + // these keys to strings. + // + // From my reading of go-yaml v2 (specifically the resolve function), + // keys can only have the types string, int, int64, float64, binary + // (unsupported), or null (unsupported). + strMap := make(map[string]interface{}) + for k, v := range typedYAMLObj { + // Resolve the key to a string first. + var keyString string + switch typedKey := k.(type) { + case string: + keyString = typedKey + case int: + keyString = strconv.Itoa(typedKey) + case int64: + // go-yaml will only return an int64 as a key if the system + // architecture is 32-bit and the key's value is between 32-bit + // and 64-bit. Otherwise the key type will simply be int. + keyString = strconv.FormatInt(typedKey, 10) + case float64: + // Stolen from go-yaml to use the same conversion to string as + // the go-yaml library uses to convert float to string when + // Marshaling. + s := strconv.FormatFloat(typedKey, 'g', -1, 32) + switch s { + case "+Inf": + s = ".inf" + case "-Inf": + s = "-.inf" + case "NaN": + s = ".nan" + } + keyString = s + case bool: + if typedKey { + keyString = "true" + } else { + keyString = "false" + } + default: + return nil, fmt.Errorf("Unsupported map key of type: %s, key: %+#v, value: %+#v", + reflect.TypeOf(k), k, v) + } + + // jsonTarget should be a struct or a map. If it's a struct, find + // the field it's going to map to and pass its reflect.Value. If + // it's a map, find the element type of the map and pass the + // reflect.Value created from that type. If it's neither, just pass + // nil - JSON conversion will error for us if it's a real issue. + if jsonTarget != nil { + t := *jsonTarget + if t.Kind() == reflect.Struct { + keyBytes := []byte(keyString) + // Find the field that the JSON library would use. + var f *field + fields := cachedTypeFields(t.Type()) + for i := range fields { + ff := &fields[i] + if bytes.Equal(ff.nameBytes, keyBytes) { + f = ff + break + } + // Do case-insensitive comparison. + if f == nil && ff.equalFold(ff.nameBytes, keyBytes) { + f = ff + } + } + if f != nil { + // Find the reflect.Value of the most preferential + // struct field. 
+ jtf := t.Field(f.index[0]) + strMap[keyString], err = convertToJSONableObject(v, &jtf) + if err != nil { + return nil, err + } + continue + } + } else if t.Kind() == reflect.Map { + // Create a zero value of the map's element type to use as + // the JSON target. + jtv := reflect.Zero(t.Type().Elem()) + strMap[keyString], err = convertToJSONableObject(v, &jtv) + if err != nil { + return nil, err + } + continue + } + } + strMap[keyString], err = convertToJSONableObject(v, nil) + if err != nil { + return nil, err + } + } + return strMap, nil + case []interface{}: + // We need to recurse into arrays in case there are any + // map[interface{}]interface{}'s inside and to convert any + // numbers to strings. + + // If jsonTarget is a slice (which it really should be), find the + // thing it's going to map to. If it's not a slice, just pass nil + // - JSON conversion will error for us if it's a real issue. + var jsonSliceElemValue *reflect.Value + if jsonTarget != nil { + t := *jsonTarget + if t.Kind() == reflect.Slice { + // By default slices point to nil, but we need a reflect.Value + // pointing to a value of the slice type, so we create one here. + ev := reflect.Indirect(reflect.New(t.Type().Elem())) + jsonSliceElemValue = &ev + } + } + + // Make and use a new array. + arr := make([]interface{}, len(typedYAMLObj)) + for i, v := range typedYAMLObj { + arr[i], err = convertToJSONableObject(v, jsonSliceElemValue) + if err != nil { + return nil, err + } + } + return arr, nil + default: + // If the target type is a string and the YAML type is a number, + // convert the YAML type to a string. + if jsonTarget != nil && (*jsonTarget).Kind() == reflect.String { + // Based on my reading of go-yaml, it may return int, int64, + // float64, or uint64. + var s string + switch typedVal := typedYAMLObj.(type) { + case int: + s = strconv.FormatInt(int64(typedVal), 10) + case int64: + s = strconv.FormatInt(typedVal, 10) + case float64: + s = strconv.FormatFloat(typedVal, 'g', -1, 32) + case uint64: + s = strconv.FormatUint(typedVal, 10) + case bool: + if typedVal { + s = "true" + } else { + s = "false" + } + } + if len(s) > 0 { + yamlObj = interface{}(s) + } + } + return yamlObj, nil + } + + return nil, nil +} diff --git a/vendor/github.com/ghodss/yaml/yaml_test.go b/vendor/github.com/ghodss/yaml/yaml_test.go new file mode 100644 index 000000000..505af4530 --- /dev/null +++ b/vendor/github.com/ghodss/yaml/yaml_test.go @@ -0,0 +1,287 @@ +package yaml + +import ( + "fmt" + "math" + "reflect" + "strconv" + "testing" +) + +type MarshalTest struct { + A string + B int64 + // Would like to test float64, but it's not supported in go-yaml. + // (See https://github.com/go-yaml/yaml/issues/83.) 
+ C float32 +} + +func TestMarshal(t *testing.T) { + f32String := strconv.FormatFloat(math.MaxFloat32, 'g', -1, 32) + s := MarshalTest{"a", math.MaxInt64, math.MaxFloat32} + e := []byte(fmt.Sprintf("A: a\nB: %d\nC: %s\n", math.MaxInt64, f32String)) + + y, err := Marshal(s) + if err != nil { + t.Errorf("error marshaling YAML: %v", err) + } + + if !reflect.DeepEqual(y, e) { + t.Errorf("marshal YAML was unsuccessful, expected: %#v, got: %#v", + string(e), string(y)) + } +} + +type UnmarshalString struct { + A string + True string +} + +type UnmarshalStringMap struct { + A map[string]string +} + +type UnmarshalNestedString struct { + A NestedString +} + +type NestedString struct { + A string +} + +type UnmarshalSlice struct { + A []NestedSlice +} + +type NestedSlice struct { + B string + C *string +} + +func TestUnmarshal(t *testing.T) { + y := []byte("a: 1") + s1 := UnmarshalString{} + e1 := UnmarshalString{A: "1"} + unmarshal(t, y, &s1, &e1) + + y = []byte("a: true") + s1 = UnmarshalString{} + e1 = UnmarshalString{A: "true"} + unmarshal(t, y, &s1, &e1) + + y = []byte("true: 1") + s1 = UnmarshalString{} + e1 = UnmarshalString{True: "1"} + unmarshal(t, y, &s1, &e1) + + y = []byte("a:\n a: 1") + s2 := UnmarshalNestedString{} + e2 := UnmarshalNestedString{NestedString{"1"}} + unmarshal(t, y, &s2, &e2) + + y = []byte("a:\n - b: abc\n c: def\n - b: 123\n c: 456\n") + s3 := UnmarshalSlice{} + e3 := UnmarshalSlice{[]NestedSlice{NestedSlice{"abc", strPtr("def")}, NestedSlice{"123", strPtr("456")}}} + unmarshal(t, y, &s3, &e3) + + y = []byte("a:\n b: 1") + s4 := UnmarshalStringMap{} + e4 := UnmarshalStringMap{map[string]string{"b": "1"}} + unmarshal(t, y, &s4, &e4) + + y = []byte(` +a: + name: TestA +b: + name: TestB +`) + type NamedThing struct { + Name string `json:"name"` + } + s5 := map[string]*NamedThing{} + e5 := map[string]*NamedThing{ + "a": &NamedThing{Name: "TestA"}, + "b": &NamedThing{Name: "TestB"}, + } + unmarshal(t, y, &s5, &e5) +} + +func unmarshal(t *testing.T, y []byte, s, e interface{}) { + err := Unmarshal(y, s) + if err != nil { + t.Errorf("error unmarshaling YAML: %v", err) + } + + if !reflect.DeepEqual(s, e) { + t.Errorf("unmarshal YAML was unsuccessful, expected: %+#v, got: %+#v", + e, s) + } +} + +type Case struct { + input string + output string + // By default we test that reversing the output == input. But if there is a + // difference in the reversed output, you can optionally specify it here. 
+ reverse *string +} + +type RunType int + +const ( + RunTypeJSONToYAML RunType = iota + RunTypeYAMLToJSON +) + +func TestJSONToYAML(t *testing.T) { + cases := []Case{ + { + `{"t":"a"}`, + "t: a\n", + nil, + }, { + `{"t":null}`, + "t: null\n", + nil, + }, + } + + runCases(t, RunTypeJSONToYAML, cases) +} + +func TestYAMLToJSON(t *testing.T) { + cases := []Case{ + { + "t: a\n", + `{"t":"a"}`, + nil, + }, { + "t: \n", + `{"t":null}`, + strPtr("t: null\n"), + }, { + "t: null\n", + `{"t":null}`, + nil, + }, { + "1: a\n", + `{"1":"a"}`, + strPtr("\"1\": a\n"), + }, { + "1000000000000000000000000000000000000: a\n", + `{"1e+36":"a"}`, + strPtr("\"1e+36\": a\n"), + }, { + "1e+36: a\n", + `{"1e+36":"a"}`, + strPtr("\"1e+36\": a\n"), + }, { + "\"1e+36\": a\n", + `{"1e+36":"a"}`, + nil, + }, { + "\"1.2\": a\n", + `{"1.2":"a"}`, + nil, + }, { + "- t: a\n", + `[{"t":"a"}]`, + nil, + }, { + "- t: a\n" + + "- t:\n" + + " b: 1\n" + + " c: 2\n", + `[{"t":"a"},{"t":{"b":1,"c":2}}]`, + nil, + }, { + `[{t: a}, {t: {b: 1, c: 2}}]`, + `[{"t":"a"},{"t":{"b":1,"c":2}}]`, + strPtr("- t: a\n" + + "- t:\n" + + " b: 1\n" + + " c: 2\n"), + }, { + "- t: \n", + `[{"t":null}]`, + strPtr("- t: null\n"), + }, { + "- t: null\n", + `[{"t":null}]`, + nil, + }, + } + + // Cases that should produce errors. + _ = []Case{ + { + "~: a", + `{"null":"a"}`, + nil, + }, { + "a: !!binary gIGC\n", + "{\"a\":\"\x80\x81\x82\"}", + nil, + }, + } + + runCases(t, RunTypeYAMLToJSON, cases) +} + +func runCases(t *testing.T, runType RunType, cases []Case) { + var f func([]byte) ([]byte, error) + var invF func([]byte) ([]byte, error) + var msg string + var invMsg string + if runType == RunTypeJSONToYAML { + f = JSONToYAML + invF = YAMLToJSON + msg = "JSON to YAML" + invMsg = "YAML back to JSON" + } else { + f = YAMLToJSON + invF = JSONToYAML + msg = "YAML to JSON" + invMsg = "JSON back to YAML" + } + + for _, c := range cases { + // Convert the string. + t.Logf("converting %s\n", c.input) + output, err := f([]byte(c.input)) + if err != nil { + t.Errorf("Failed to convert %s, input: `%s`, err: %v", msg, c.input, err) + } + + // Check it against the expected output. + if string(output) != c.output { + t.Errorf("Failed to convert %s, input: `%s`, expected `%s`, got `%s`", + msg, c.input, c.output, string(output)) + } + + // Set the string that we will compare the reversed output to. + reverse := c.input + // If a special reverse string was specified, use that instead. + if c.reverse != nil { + reverse = *c.reverse + } + + // Reverse the output. + input, err := invF(output) + if err != nil { + t.Errorf("Failed to convert %s, input: `%s`, err: %v", invMsg, string(output), err) + } + + // Check the reverse is equal to the input (or to *c.reverse). + if string(input) != reverse { + t.Errorf("Failed to convert %s, input: `%s`, expected `%s`, got `%s`", + invMsg, string(output), reverse, string(input)) + } + } + +} + +// To be able to easily fill in the *Case.reverse string above. 
+func strPtr(s string) *string { + return &s +} diff --git a/vendor/github.com/golang/protobuf/.gitignore b/vendor/github.com/golang/protobuf/.gitignore new file mode 100644 index 000000000..8f5b596b1 --- /dev/null +++ b/vendor/github.com/golang/protobuf/.gitignore @@ -0,0 +1,16 @@ +.DS_Store +*.[568ao] +*.ao +*.so +*.pyc +._* +.nfs.* +[568a].out +*~ +*.orig +core +_obj +_test +_testmain.go +protoc-gen-go/testdata/multi/*.pb.go +_conformance/_conformance diff --git a/vendor/github.com/golang/protobuf/.travis.yml b/vendor/github.com/golang/protobuf/.travis.yml new file mode 100644 index 000000000..93c67805b --- /dev/null +++ b/vendor/github.com/golang/protobuf/.travis.yml @@ -0,0 +1,18 @@ +sudo: false +language: go +go: +- 1.6.x +- 1.7.x +- 1.8.x +- 1.9.x + +install: + - go get -v -d -t github.com/golang/protobuf/... + - curl -L https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip -o /tmp/protoc.zip + - unzip /tmp/protoc.zip -d $HOME/protoc + +env: + - PATH=$HOME/protoc/bin:$PATH + +script: + - make all test diff --git a/vendor/github.com/golang/protobuf/AUTHORS b/vendor/github.com/golang/protobuf/AUTHORS new file mode 100644 index 000000000..15167cd74 --- /dev/null +++ b/vendor/github.com/golang/protobuf/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/github.com/golang/protobuf/CONTRIBUTORS b/vendor/github.com/golang/protobuf/CONTRIBUTORS new file mode 100644 index 000000000..1c4577e96 --- /dev/null +++ b/vendor/github.com/golang/protobuf/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/github.com/golang/protobuf/LICENSE b/vendor/github.com/golang/protobuf/LICENSE new file mode 100644 index 000000000..1b1b1921e --- /dev/null +++ b/vendor/github.com/golang/protobuf/LICENSE @@ -0,0 +1,31 @@ +Go support for Protocol Buffers - Google's data interchange format + +Copyright 2010 The Go Authors. All rights reserved. +https://github.com/golang/protobuf + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/golang/protobuf/Make.protobuf b/vendor/github.com/golang/protobuf/Make.protobuf new file mode 100644 index 000000000..15071de10 --- /dev/null +++ b/vendor/github.com/golang/protobuf/Make.protobuf @@ -0,0 +1,40 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Includable Makefile to add a rule for generating .pb.go files from .proto files +# (Google protocol buffer descriptions). +# Typical use if myproto.proto is a file in package mypackage in this directory: +# +# include $(GOROOT)/src/pkg/github.com/golang/protobuf/Make.protobuf + +%.pb.go: %.proto + protoc --go_out=. $< + diff --git a/vendor/github.com/golang/protobuf/Makefile b/vendor/github.com/golang/protobuf/Makefile new file mode 100644 index 000000000..a1421d8b7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/Makefile @@ -0,0 +1,55 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +all: install + +install: + go install ./proto ./jsonpb ./ptypes + go install ./protoc-gen-go + +test: + go test ./proto ./jsonpb ./ptypes + make -C protoc-gen-go/testdata test + +clean: + go clean ./... + +nuke: + go clean -i ./... + +regenerate: + make -C protoc-gen-go/descriptor regenerate + make -C protoc-gen-go/plugin regenerate + make -C protoc-gen-go/testdata regenerate + make -C proto/testdata regenerate + make -C jsonpb/jsonpb_test_proto regenerate + make -C _conformance regenerate diff --git a/vendor/github.com/golang/protobuf/README.md b/vendor/github.com/golang/protobuf/README.md new file mode 100644 index 000000000..9c4c815c0 --- /dev/null +++ b/vendor/github.com/golang/protobuf/README.md @@ -0,0 +1,244 @@ +# Go support for Protocol Buffers + +[![Build Status](https://travis-ci.org/golang/protobuf.svg?branch=master)](https://travis-ci.org/golang/protobuf) +[![GoDoc](https://godoc.org/github.com/golang/protobuf?status.svg)](https://godoc.org/github.com/golang/protobuf) + +Google's data interchange format. +Copyright 2010 The Go Authors. +https://github.com/golang/protobuf + +This package and the code it generates requires at least Go 1.4. + +This software implements Go bindings for protocol buffers. For +information about protocol buffers themselves, see + https://developers.google.com/protocol-buffers/ + +## Installation ## + +To use this software, you must: +- Install the standard C++ implementation of protocol buffers from + https://developers.google.com/protocol-buffers/ +- Of course, install the Go compiler and tools from + https://golang.org/ + See + https://golang.org/doc/install + for details or, if you are using gccgo, follow the instructions at + https://golang.org/doc/install/gccgo +- Grab the code from the repository and install the proto package. + The simplest way is to run `go get -u github.com/golang/protobuf/protoc-gen-go`. + The compiler plugin, protoc-gen-go, will be installed in $GOBIN, + defaulting to $GOPATH/bin. It must be in your $PATH for the protocol + compiler, protoc, to find it. + +This software has two parts: a 'protocol compiler plugin' that +generates Go source files that, once compiled, can access and manage +protocol buffers; and a library that implements run-time support for +encoding (marshaling), decoding (unmarshaling), and accessing protocol +buffers. 
+ +There is support for gRPC in Go using protocol buffers. +See the note at the bottom of this file for details. + +There are no insertion points in the plugin. + + +## Using protocol buffers with Go ## + +Once the software is installed, there are two steps to using it. +First you must compile the protocol buffer definitions and then import +them, with the support library, into your program. + +To compile the protocol buffer definition, run protoc with the --go_out +parameter set to the directory you want to output the Go code to. + + protoc --go_out=. *.proto + +The generated files will be suffixed .pb.go. See the Test code below +for an example using such a file. + + +The package comment for the proto library contains text describing +the interface provided in Go for protocol buffers. Here is an edited +version. + +========== + +The proto package converts data structures to and from the +wire format of protocol buffers. It works in concert with the +Go source code generated for .proto files by the protocol compiler. + +A summary of the properties of the protocol buffer interface +for a protocol buffer variable v: + + - Names are turned from camel_case to CamelCase for export. + - There are no methods on v to set fields; just treat + them as structure fields. + - There are getters that return a field's value if set, + and return the field's default value if unset. + The getters work even if the receiver is a nil message. + - The zero value for a struct is its correct initialization state. + All desired fields must be set before marshaling. + - A Reset() method will restore a protobuf struct to its zero state. + - Non-repeated fields are pointers to the values; nil means unset. + That is, optional or required field int32 f becomes F *int32. + - Repeated fields are slices. + - Helper functions are available to aid the setting of fields. + Helpers for getting values are superseded by the + GetFoo methods and their use is deprecated. + msg.Foo = proto.String("hello") // set field + - Constants are defined to hold the default values of all fields that + have them. They have the form Default_StructName_FieldName. + Because the getter methods handle defaulted values, + direct use of these constants should be rare. + - Enums are given type names and maps from names to values. + Enum values are prefixed with the enum's type name. Enum types have + a String method, and a Enum method to assist in message construction. + - Nested groups and enums have type names prefixed with the name of + the surrounding message type. + - Extensions are given descriptor names that start with E_, + followed by an underscore-delimited list of the nested messages + that contain it (if any) followed by the CamelCased name of the + extension field itself. HasExtension, ClearExtension, GetExtension + and SetExtension are functions for manipulating extensions. + - Oneof field sets are given a single field in their message, + with distinguished wrapper types for each possible field value. + - Marshal and Unmarshal are functions to encode and decode the wire format. + +When the .proto file specifies `syntax="proto3"`, there are some differences: + + - Non-repeated fields of non-message type are values instead of pointers. + - Enum types do not get an Enum method. 
+ +Consider file test.proto, containing + +```proto + syntax = "proto2"; + package example; + + enum FOO { X = 17; }; + + message Test { + required string label = 1; + optional int32 type = 2 [default=77]; + repeated int64 reps = 3; + optional group OptionalGroup = 4 { + required string RequiredField = 5; + } + } +``` + +To create and play with a Test object from the example package, + +```go + package main + + import ( + "log" + + "github.com/golang/protobuf/proto" + "path/to/example" + ) + + func main() { + test := &example.Test { + Label: proto.String("hello"), + Type: proto.Int32(17), + Reps: []int64{1, 2, 3}, + Optionalgroup: &example.Test_OptionalGroup { + RequiredField: proto.String("good bye"), + }, + } + data, err := proto.Marshal(test) + if err != nil { + log.Fatal("marshaling error: ", err) + } + newTest := &example.Test{} + err = proto.Unmarshal(data, newTest) + if err != nil { + log.Fatal("unmarshaling error: ", err) + } + // Now test and newTest contain the same data. + if test.GetLabel() != newTest.GetLabel() { + log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel()) + } + // etc. + } +``` + +## Parameters ## + +To pass extra parameters to the plugin, use a comma-separated +parameter list separated from the output directory by a colon: + + + protoc --go_out=plugins=grpc,import_path=mypackage:. *.proto + + +- `import_prefix=xxx` - a prefix that is added onto the beginning of + all imports. Useful for things like generating protos in a + subdirectory, or regenerating vendored protobufs in-place. +- `import_path=foo/bar` - used as the package if no input files + declare `go_package`. If it contains slashes, everything up to the + rightmost slash is ignored. +- `plugins=plugin1+plugin2` - specifies the list of sub-plugins to + load. The only plugin in this repo is `grpc`. +- `Mfoo/bar.proto=quux/shme` - declares that foo/bar.proto is + associated with Go package quux/shme. This is subject to the + import_prefix parameter. + +## gRPC Support ## + +If a proto file specifies RPC services, protoc-gen-go can be instructed to +generate code compatible with gRPC (http://www.grpc.io/). To do this, pass +the `plugins` parameter to protoc-gen-go; the usual way is to insert it into +the --go_out argument to protoc: + + protoc --go_out=plugins=grpc:. *.proto + +## Compatibility ## + +The library and the generated code are expected to be stable over time. +However, we reserve the right to make breaking changes without notice for the +following reasons: + +- Security. A security issue in the specification or implementation may come to + light whose resolution requires breaking compatibility. We reserve the right + to address such security issues. +- Unspecified behavior. There are some aspects of the Protocol Buffers + specification that are undefined. Programs that depend on such unspecified + behavior may break in future releases. +- Specification errors or changes. If it becomes necessary to address an + inconsistency, incompleteness, or change in the Protocol Buffers + specification, resolving the issue could affect the meaning or legality of + existing programs. We reserve the right to address such issues, including + updating the implementations. +- Bugs. If the library has a bug that violates the specification, a program + that depends on the buggy behavior may break if the bug is fixed. We reserve + the right to fix such bugs. +- Adding methods or fields to generated structs. 
These may conflict with field + names that already exist in a schema, causing applications to break. When the + code generator encounters a field in the schema that would collide with a + generated field or method name, the code generator will append an underscore + to the generated field or method name. +- Adding, removing, or changing methods or fields in generated structs that + start with `XXX`. These parts of the generated code are exported out of + necessity, but should not be considered part of the public API. +- Adding, removing, or changing unexported symbols in generated code. + +Any breaking changes outside of these will be announced 6 months in advance to +protobuf@googlegroups.com. + +You should, whenever possible, use generated code created by the `protoc-gen-go` +tool built at the same commit as the `proto` package. The `proto` package +declares package-level constants in the form `ProtoPackageIsVersionX`. +Application code and generated code may depend on one of these constants to +ensure that compilation will fail if the available version of the proto library +is too old. Whenever we make a change to the generated code that requires newer +library support, in the same commit we will increment the version number of the +generated code and declare a new package-level constant whose name incorporates +the latest version number. Removing a compatibility constant is considered a +breaking change and would be subject to the announcement policy stated above. + +The `protoc-gen-go/generator` package exposes a plugin interface, +which is used by the gRPC code generation. This interface is not +supported and is subject to incompatible changes without notice. diff --git a/vendor/github.com/golang/protobuf/_conformance/Makefile b/vendor/github.com/golang/protobuf/_conformance/Makefile new file mode 100644 index 000000000..89800e2d9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/_conformance/Makefile @@ -0,0 +1,33 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2016 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +regenerate: + protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers,Mgoogle/protobuf/field_mask.proto=google.golang.org/genproto/protobuf:. conformance_proto/conformance.proto diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance.go b/vendor/github.com/golang/protobuf/_conformance/conformance.go new file mode 100644 index 000000000..c54212c80 --- /dev/null +++ b/vendor/github.com/golang/protobuf/_conformance/conformance.go @@ -0,0 +1,161 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// conformance implements the conformance test subprocess protocol as +// documented in conformance.proto. 
+package main + +import ( + "encoding/binary" + "fmt" + "io" + "os" + + pb "github.com/golang/protobuf/_conformance/conformance_proto" + "github.com/golang/protobuf/jsonpb" + "github.com/golang/protobuf/proto" +) + +func main() { + var sizeBuf [4]byte + inbuf := make([]byte, 0, 4096) + outbuf := proto.NewBuffer(nil) + for { + if _, err := io.ReadFull(os.Stdin, sizeBuf[:]); err == io.EOF { + break + } else if err != nil { + fmt.Fprintln(os.Stderr, "go conformance: read request:", err) + os.Exit(1) + } + size := binary.LittleEndian.Uint32(sizeBuf[:]) + if int(size) > cap(inbuf) { + inbuf = make([]byte, size) + } + inbuf = inbuf[:size] + if _, err := io.ReadFull(os.Stdin, inbuf); err != nil { + fmt.Fprintln(os.Stderr, "go conformance: read request:", err) + os.Exit(1) + } + + req := new(pb.ConformanceRequest) + if err := proto.Unmarshal(inbuf, req); err != nil { + fmt.Fprintln(os.Stderr, "go conformance: parse request:", err) + os.Exit(1) + } + res := handle(req) + + if err := outbuf.Marshal(res); err != nil { + fmt.Fprintln(os.Stderr, "go conformance: marshal response:", err) + os.Exit(1) + } + binary.LittleEndian.PutUint32(sizeBuf[:], uint32(len(outbuf.Bytes()))) + if _, err := os.Stdout.Write(sizeBuf[:]); err != nil { + fmt.Fprintln(os.Stderr, "go conformance: write response:", err) + os.Exit(1) + } + if _, err := os.Stdout.Write(outbuf.Bytes()); err != nil { + fmt.Fprintln(os.Stderr, "go conformance: write response:", err) + os.Exit(1) + } + outbuf.Reset() + } +} + +var jsonMarshaler = jsonpb.Marshaler{ + OrigName: true, +} + +func handle(req *pb.ConformanceRequest) *pb.ConformanceResponse { + var err error + var msg pb.TestAllTypes + switch p := req.Payload.(type) { + case *pb.ConformanceRequest_ProtobufPayload: + err = proto.Unmarshal(p.ProtobufPayload, &msg) + case *pb.ConformanceRequest_JsonPayload: + err = jsonpb.UnmarshalString(p.JsonPayload, &msg) + if err != nil && err.Error() == "unmarshaling Any not supported yet" { + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_Skipped{ + Skipped: err.Error(), + }, + } + } + default: + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_RuntimeError{ + RuntimeError: "unknown request payload type", + }, + } + } + if err != nil { + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_ParseError{ + ParseError: err.Error(), + }, + } + } + switch req.RequestedOutputFormat { + case pb.WireFormat_PROTOBUF: + p, err := proto.Marshal(&msg) + if err != nil { + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_SerializeError{ + SerializeError: err.Error(), + }, + } + } + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_ProtobufPayload{ + ProtobufPayload: p, + }, + } + case pb.WireFormat_JSON: + p, err := jsonMarshaler.MarshalToString(&msg) + if err != nil { + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_SerializeError{ + SerializeError: err.Error(), + }, + } + } + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_JsonPayload{ + JsonPayload: p, + }, + } + default: + return &pb.ConformanceResponse{ + Result: &pb.ConformanceResponse_RuntimeError{ + RuntimeError: "unknown output format", + }, + } + } +} diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go b/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go new file mode 100644 index 000000000..ec354eada --- /dev/null +++ b/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.pb.go @@ -0,0 
+1,1885 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: conformance_proto/conformance.proto + +/* +Package conformance is a generated protocol buffer package. + +It is generated from these files: + conformance_proto/conformance.proto + +It has these top-level messages: + ConformanceRequest + ConformanceResponse + TestAllTypes + ForeignMessage +*/ +package conformance + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" +import google_protobuf1 "github.com/golang/protobuf/ptypes/duration" +import google_protobuf2 "google.golang.org/genproto/protobuf" +import google_protobuf3 "github.com/golang/protobuf/ptypes/struct" +import google_protobuf4 "github.com/golang/protobuf/ptypes/timestamp" +import google_protobuf5 "github.com/golang/protobuf/ptypes/wrappers" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WireFormat int32 + +const ( + WireFormat_UNSPECIFIED WireFormat = 0 + WireFormat_PROTOBUF WireFormat = 1 + WireFormat_JSON WireFormat = 2 +) + +var WireFormat_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "PROTOBUF", + 2: "JSON", +} +var WireFormat_value = map[string]int32{ + "UNSPECIFIED": 0, + "PROTOBUF": 1, + "JSON": 2, +} + +func (x WireFormat) String() string { + return proto.EnumName(WireFormat_name, int32(x)) +} +func (WireFormat) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +type ForeignEnum int32 + +const ( + ForeignEnum_FOREIGN_FOO ForeignEnum = 0 + ForeignEnum_FOREIGN_BAR ForeignEnum = 1 + ForeignEnum_FOREIGN_BAZ ForeignEnum = 2 +) + +var ForeignEnum_name = map[int32]string{ + 0: "FOREIGN_FOO", + 1: "FOREIGN_BAR", + 2: "FOREIGN_BAZ", +} +var ForeignEnum_value = map[string]int32{ + "FOREIGN_FOO": 0, + "FOREIGN_BAR": 1, + "FOREIGN_BAZ": 2, +} + +func (x ForeignEnum) String() string { + return proto.EnumName(ForeignEnum_name, int32(x)) +} +func (ForeignEnum) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +type TestAllTypes_NestedEnum int32 + +const ( + TestAllTypes_FOO TestAllTypes_NestedEnum = 0 + TestAllTypes_BAR TestAllTypes_NestedEnum = 1 + TestAllTypes_BAZ TestAllTypes_NestedEnum = 2 + TestAllTypes_NEG TestAllTypes_NestedEnum = -1 +) + +var TestAllTypes_NestedEnum_name = map[int32]string{ + 0: "FOO", + 1: "BAR", + 2: "BAZ", + -1: "NEG", +} +var TestAllTypes_NestedEnum_value = map[string]int32{ + "FOO": 0, + "BAR": 1, + "BAZ": 2, + "NEG": -1, +} + +func (x TestAllTypes_NestedEnum) String() string { + return proto.EnumName(TestAllTypes_NestedEnum_name, int32(x)) +} +func (TestAllTypes_NestedEnum) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } + +// Represents a single test case's input. The testee should: +// +// 1. parse this proto (which should always succeed) +// 2. parse the protobuf or JSON payload in "payload" (which may fail) +// 3. if the parse succeeded, serialize the message in the requested format. +type ConformanceRequest struct { + // The payload (whether protobuf of JSON) is always for a TestAllTypes proto + // (see below). 
+ // + // Types that are valid to be assigned to Payload: + // *ConformanceRequest_ProtobufPayload + // *ConformanceRequest_JsonPayload + Payload isConformanceRequest_Payload `protobuf_oneof:"payload"` + // Which format should the testee serialize its message to? + RequestedOutputFormat WireFormat `protobuf:"varint,3,opt,name=requested_output_format,json=requestedOutputFormat,enum=conformance.WireFormat" json:"requested_output_format,omitempty"` +} + +func (m *ConformanceRequest) Reset() { *m = ConformanceRequest{} } +func (m *ConformanceRequest) String() string { return proto.CompactTextString(m) } +func (*ConformanceRequest) ProtoMessage() {} +func (*ConformanceRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +type isConformanceRequest_Payload interface { + isConformanceRequest_Payload() +} + +type ConformanceRequest_ProtobufPayload struct { + ProtobufPayload []byte `protobuf:"bytes,1,opt,name=protobuf_payload,json=protobufPayload,proto3,oneof"` +} +type ConformanceRequest_JsonPayload struct { + JsonPayload string `protobuf:"bytes,2,opt,name=json_payload,json=jsonPayload,oneof"` +} + +func (*ConformanceRequest_ProtobufPayload) isConformanceRequest_Payload() {} +func (*ConformanceRequest_JsonPayload) isConformanceRequest_Payload() {} + +func (m *ConformanceRequest) GetPayload() isConformanceRequest_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *ConformanceRequest) GetProtobufPayload() []byte { + if x, ok := m.GetPayload().(*ConformanceRequest_ProtobufPayload); ok { + return x.ProtobufPayload + } + return nil +} + +func (m *ConformanceRequest) GetJsonPayload() string { + if x, ok := m.GetPayload().(*ConformanceRequest_JsonPayload); ok { + return x.JsonPayload + } + return "" +} + +func (m *ConformanceRequest) GetRequestedOutputFormat() WireFormat { + if m != nil { + return m.RequestedOutputFormat + } + return WireFormat_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConformanceRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConformanceRequest_OneofMarshaler, _ConformanceRequest_OneofUnmarshaler, _ConformanceRequest_OneofSizer, []interface{}{ + (*ConformanceRequest_ProtobufPayload)(nil), + (*ConformanceRequest_JsonPayload)(nil), + } +} + +func _ConformanceRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConformanceRequest) + // payload + switch x := m.Payload.(type) { + case *ConformanceRequest_ProtobufPayload: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ProtobufPayload) + case *ConformanceRequest_JsonPayload: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.JsonPayload) + case nil: + default: + return fmt.Errorf("ConformanceRequest.Payload has unexpected type %T", x) + } + return nil +} + +func _ConformanceRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConformanceRequest) + switch tag { + case 1: // payload.protobuf_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Payload = &ConformanceRequest_ProtobufPayload{x} + return true, err + case 2: // payload.json_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Payload = &ConformanceRequest_JsonPayload{x} + return true, err + default: + return false, nil + } +} + +func _ConformanceRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConformanceRequest) + // payload + switch x := m.Payload.(type) { + case *ConformanceRequest_ProtobufPayload: + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.ProtobufPayload))) + n += len(x.ProtobufPayload) + case *ConformanceRequest_JsonPayload: + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.JsonPayload))) + n += len(x.JsonPayload) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a single test case's output. 
+type ConformanceResponse struct { + // Types that are valid to be assigned to Result: + // *ConformanceResponse_ParseError + // *ConformanceResponse_SerializeError + // *ConformanceResponse_RuntimeError + // *ConformanceResponse_ProtobufPayload + // *ConformanceResponse_JsonPayload + // *ConformanceResponse_Skipped + Result isConformanceResponse_Result `protobuf_oneof:"result"` +} + +func (m *ConformanceResponse) Reset() { *m = ConformanceResponse{} } +func (m *ConformanceResponse) String() string { return proto.CompactTextString(m) } +func (*ConformanceResponse) ProtoMessage() {} +func (*ConformanceResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +type isConformanceResponse_Result interface { + isConformanceResponse_Result() +} + +type ConformanceResponse_ParseError struct { + ParseError string `protobuf:"bytes,1,opt,name=parse_error,json=parseError,oneof"` +} +type ConformanceResponse_SerializeError struct { + SerializeError string `protobuf:"bytes,6,opt,name=serialize_error,json=serializeError,oneof"` +} +type ConformanceResponse_RuntimeError struct { + RuntimeError string `protobuf:"bytes,2,opt,name=runtime_error,json=runtimeError,oneof"` +} +type ConformanceResponse_ProtobufPayload struct { + ProtobufPayload []byte `protobuf:"bytes,3,opt,name=protobuf_payload,json=protobufPayload,proto3,oneof"` +} +type ConformanceResponse_JsonPayload struct { + JsonPayload string `protobuf:"bytes,4,opt,name=json_payload,json=jsonPayload,oneof"` +} +type ConformanceResponse_Skipped struct { + Skipped string `protobuf:"bytes,5,opt,name=skipped,oneof"` +} + +func (*ConformanceResponse_ParseError) isConformanceResponse_Result() {} +func (*ConformanceResponse_SerializeError) isConformanceResponse_Result() {} +func (*ConformanceResponse_RuntimeError) isConformanceResponse_Result() {} +func (*ConformanceResponse_ProtobufPayload) isConformanceResponse_Result() {} +func (*ConformanceResponse_JsonPayload) isConformanceResponse_Result() {} +func (*ConformanceResponse_Skipped) isConformanceResponse_Result() {} + +func (m *ConformanceResponse) GetResult() isConformanceResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (m *ConformanceResponse) GetParseError() string { + if x, ok := m.GetResult().(*ConformanceResponse_ParseError); ok { + return x.ParseError + } + return "" +} + +func (m *ConformanceResponse) GetSerializeError() string { + if x, ok := m.GetResult().(*ConformanceResponse_SerializeError); ok { + return x.SerializeError + } + return "" +} + +func (m *ConformanceResponse) GetRuntimeError() string { + if x, ok := m.GetResult().(*ConformanceResponse_RuntimeError); ok { + return x.RuntimeError + } + return "" +} + +func (m *ConformanceResponse) GetProtobufPayload() []byte { + if x, ok := m.GetResult().(*ConformanceResponse_ProtobufPayload); ok { + return x.ProtobufPayload + } + return nil +} + +func (m *ConformanceResponse) GetJsonPayload() string { + if x, ok := m.GetResult().(*ConformanceResponse_JsonPayload); ok { + return x.JsonPayload + } + return "" +} + +func (m *ConformanceResponse) GetSkipped() string { + if x, ok := m.GetResult().(*ConformanceResponse_Skipped); ok { + return x.Skipped + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConformanceResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConformanceResponse_OneofMarshaler, _ConformanceResponse_OneofUnmarshaler, _ConformanceResponse_OneofSizer, []interface{}{ + (*ConformanceResponse_ParseError)(nil), + (*ConformanceResponse_SerializeError)(nil), + (*ConformanceResponse_RuntimeError)(nil), + (*ConformanceResponse_ProtobufPayload)(nil), + (*ConformanceResponse_JsonPayload)(nil), + (*ConformanceResponse_Skipped)(nil), + } +} + +func _ConformanceResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConformanceResponse) + // result + switch x := m.Result.(type) { + case *ConformanceResponse_ParseError: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ParseError) + case *ConformanceResponse_SerializeError: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.SerializeError) + case *ConformanceResponse_RuntimeError: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RuntimeError) + case *ConformanceResponse_ProtobufPayload: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ProtobufPayload) + case *ConformanceResponse_JsonPayload: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.JsonPayload) + case *ConformanceResponse_Skipped: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Skipped) + case nil: + default: + return fmt.Errorf("ConformanceResponse.Result has unexpected type %T", x) + } + return nil +} + +func _ConformanceResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConformanceResponse) + switch tag { + case 1: // result.parse_error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &ConformanceResponse_ParseError{x} + return true, err + case 6: // result.serialize_error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &ConformanceResponse_SerializeError{x} + return true, err + case 2: // result.runtime_error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &ConformanceResponse_RuntimeError{x} + return true, err + case 3: // result.protobuf_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Result = &ConformanceResponse_ProtobufPayload{x} + return true, err + case 4: // result.json_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &ConformanceResponse_JsonPayload{x} + return true, err + case 5: // result.skipped + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &ConformanceResponse_Skipped{x} + return true, err + default: + return false, nil + } +} + +func _ConformanceResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConformanceResponse) + // result + switch x := m.Result.(type) { + case *ConformanceResponse_ParseError: + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.ParseError))) + n += len(x.ParseError) + case *ConformanceResponse_SerializeError: + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += 
proto.SizeVarint(uint64(len(x.SerializeError))) + n += len(x.SerializeError) + case *ConformanceResponse_RuntimeError: + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.RuntimeError))) + n += len(x.RuntimeError) + case *ConformanceResponse_ProtobufPayload: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.ProtobufPayload))) + n += len(x.ProtobufPayload) + case *ConformanceResponse_JsonPayload: + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.JsonPayload))) + n += len(x.JsonPayload) + case *ConformanceResponse_Skipped: + n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Skipped))) + n += len(x.Skipped) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// This proto includes every type of field in both singular and repeated +// forms. +type TestAllTypes struct { + // Singular + OptionalInt32 int32 `protobuf:"varint,1,opt,name=optional_int32,json=optionalInt32" json:"optional_int32,omitempty"` + OptionalInt64 int64 `protobuf:"varint,2,opt,name=optional_int64,json=optionalInt64" json:"optional_int64,omitempty"` + OptionalUint32 uint32 `protobuf:"varint,3,opt,name=optional_uint32,json=optionalUint32" json:"optional_uint32,omitempty"` + OptionalUint64 uint64 `protobuf:"varint,4,opt,name=optional_uint64,json=optionalUint64" json:"optional_uint64,omitempty"` + OptionalSint32 int32 `protobuf:"zigzag32,5,opt,name=optional_sint32,json=optionalSint32" json:"optional_sint32,omitempty"` + OptionalSint64 int64 `protobuf:"zigzag64,6,opt,name=optional_sint64,json=optionalSint64" json:"optional_sint64,omitempty"` + OptionalFixed32 uint32 `protobuf:"fixed32,7,opt,name=optional_fixed32,json=optionalFixed32" json:"optional_fixed32,omitempty"` + OptionalFixed64 uint64 `protobuf:"fixed64,8,opt,name=optional_fixed64,json=optionalFixed64" json:"optional_fixed64,omitempty"` + OptionalSfixed32 int32 `protobuf:"fixed32,9,opt,name=optional_sfixed32,json=optionalSfixed32" json:"optional_sfixed32,omitempty"` + OptionalSfixed64 int64 `protobuf:"fixed64,10,opt,name=optional_sfixed64,json=optionalSfixed64" json:"optional_sfixed64,omitempty"` + OptionalFloat float32 `protobuf:"fixed32,11,opt,name=optional_float,json=optionalFloat" json:"optional_float,omitempty"` + OptionalDouble float64 `protobuf:"fixed64,12,opt,name=optional_double,json=optionalDouble" json:"optional_double,omitempty"` + OptionalBool bool `protobuf:"varint,13,opt,name=optional_bool,json=optionalBool" json:"optional_bool,omitempty"` + OptionalString string `protobuf:"bytes,14,opt,name=optional_string,json=optionalString" json:"optional_string,omitempty"` + OptionalBytes []byte `protobuf:"bytes,15,opt,name=optional_bytes,json=optionalBytes,proto3" json:"optional_bytes,omitempty"` + OptionalNestedMessage *TestAllTypes_NestedMessage `protobuf:"bytes,18,opt,name=optional_nested_message,json=optionalNestedMessage" json:"optional_nested_message,omitempty"` + OptionalForeignMessage *ForeignMessage `protobuf:"bytes,19,opt,name=optional_foreign_message,json=optionalForeignMessage" json:"optional_foreign_message,omitempty"` + OptionalNestedEnum TestAllTypes_NestedEnum `protobuf:"varint,21,opt,name=optional_nested_enum,json=optionalNestedEnum,enum=conformance.TestAllTypes_NestedEnum" json:"optional_nested_enum,omitempty"` + OptionalForeignEnum ForeignEnum `protobuf:"varint,22,opt,name=optional_foreign_enum,json=optionalForeignEnum,enum=conformance.ForeignEnum" 
json:"optional_foreign_enum,omitempty"` + OptionalStringPiece string `protobuf:"bytes,24,opt,name=optional_string_piece,json=optionalStringPiece" json:"optional_string_piece,omitempty"` + OptionalCord string `protobuf:"bytes,25,opt,name=optional_cord,json=optionalCord" json:"optional_cord,omitempty"` + RecursiveMessage *TestAllTypes `protobuf:"bytes,27,opt,name=recursive_message,json=recursiveMessage" json:"recursive_message,omitempty"` + // Repeated + RepeatedInt32 []int32 `protobuf:"varint,31,rep,packed,name=repeated_int32,json=repeatedInt32" json:"repeated_int32,omitempty"` + RepeatedInt64 []int64 `protobuf:"varint,32,rep,packed,name=repeated_int64,json=repeatedInt64" json:"repeated_int64,omitempty"` + RepeatedUint32 []uint32 `protobuf:"varint,33,rep,packed,name=repeated_uint32,json=repeatedUint32" json:"repeated_uint32,omitempty"` + RepeatedUint64 []uint64 `protobuf:"varint,34,rep,packed,name=repeated_uint64,json=repeatedUint64" json:"repeated_uint64,omitempty"` + RepeatedSint32 []int32 `protobuf:"zigzag32,35,rep,packed,name=repeated_sint32,json=repeatedSint32" json:"repeated_sint32,omitempty"` + RepeatedSint64 []int64 `protobuf:"zigzag64,36,rep,packed,name=repeated_sint64,json=repeatedSint64" json:"repeated_sint64,omitempty"` + RepeatedFixed32 []uint32 `protobuf:"fixed32,37,rep,packed,name=repeated_fixed32,json=repeatedFixed32" json:"repeated_fixed32,omitempty"` + RepeatedFixed64 []uint64 `protobuf:"fixed64,38,rep,packed,name=repeated_fixed64,json=repeatedFixed64" json:"repeated_fixed64,omitempty"` + RepeatedSfixed32 []int32 `protobuf:"fixed32,39,rep,packed,name=repeated_sfixed32,json=repeatedSfixed32" json:"repeated_sfixed32,omitempty"` + RepeatedSfixed64 []int64 `protobuf:"fixed64,40,rep,packed,name=repeated_sfixed64,json=repeatedSfixed64" json:"repeated_sfixed64,omitempty"` + RepeatedFloat []float32 `protobuf:"fixed32,41,rep,packed,name=repeated_float,json=repeatedFloat" json:"repeated_float,omitempty"` + RepeatedDouble []float64 `protobuf:"fixed64,42,rep,packed,name=repeated_double,json=repeatedDouble" json:"repeated_double,omitempty"` + RepeatedBool []bool `protobuf:"varint,43,rep,packed,name=repeated_bool,json=repeatedBool" json:"repeated_bool,omitempty"` + RepeatedString []string `protobuf:"bytes,44,rep,name=repeated_string,json=repeatedString" json:"repeated_string,omitempty"` + RepeatedBytes [][]byte `protobuf:"bytes,45,rep,name=repeated_bytes,json=repeatedBytes,proto3" json:"repeated_bytes,omitempty"` + RepeatedNestedMessage []*TestAllTypes_NestedMessage `protobuf:"bytes,48,rep,name=repeated_nested_message,json=repeatedNestedMessage" json:"repeated_nested_message,omitempty"` + RepeatedForeignMessage []*ForeignMessage `protobuf:"bytes,49,rep,name=repeated_foreign_message,json=repeatedForeignMessage" json:"repeated_foreign_message,omitempty"` + RepeatedNestedEnum []TestAllTypes_NestedEnum `protobuf:"varint,51,rep,packed,name=repeated_nested_enum,json=repeatedNestedEnum,enum=conformance.TestAllTypes_NestedEnum" json:"repeated_nested_enum,omitempty"` + RepeatedForeignEnum []ForeignEnum `protobuf:"varint,52,rep,packed,name=repeated_foreign_enum,json=repeatedForeignEnum,enum=conformance.ForeignEnum" json:"repeated_foreign_enum,omitempty"` + RepeatedStringPiece []string `protobuf:"bytes,54,rep,name=repeated_string_piece,json=repeatedStringPiece" json:"repeated_string_piece,omitempty"` + RepeatedCord []string `protobuf:"bytes,55,rep,name=repeated_cord,json=repeatedCord" json:"repeated_cord,omitempty"` + // Map + MapInt32Int32 map[int32]int32 
`protobuf:"bytes,56,rep,name=map_int32_int32,json=mapInt32Int32" json:"map_int32_int32,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + MapInt64Int64 map[int64]int64 `protobuf:"bytes,57,rep,name=map_int64_int64,json=mapInt64Int64" json:"map_int64_int64,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + MapUint32Uint32 map[uint32]uint32 `protobuf:"bytes,58,rep,name=map_uint32_uint32,json=mapUint32Uint32" json:"map_uint32_uint32,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + MapUint64Uint64 map[uint64]uint64 `protobuf:"bytes,59,rep,name=map_uint64_uint64,json=mapUint64Uint64" json:"map_uint64_uint64,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + MapSint32Sint32 map[int32]int32 `protobuf:"bytes,60,rep,name=map_sint32_sint32,json=mapSint32Sint32" json:"map_sint32_sint32,omitempty" protobuf_key:"zigzag32,1,opt,name=key" protobuf_val:"zigzag32,2,opt,name=value"` + MapSint64Sint64 map[int64]int64 `protobuf:"bytes,61,rep,name=map_sint64_sint64,json=mapSint64Sint64" json:"map_sint64_sint64,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"zigzag64,2,opt,name=value"` + MapFixed32Fixed32 map[uint32]uint32 `protobuf:"bytes,62,rep,name=map_fixed32_fixed32,json=mapFixed32Fixed32" json:"map_fixed32_fixed32,omitempty" protobuf_key:"fixed32,1,opt,name=key" protobuf_val:"fixed32,2,opt,name=value"` + MapFixed64Fixed64 map[uint64]uint64 `protobuf:"bytes,63,rep,name=map_fixed64_fixed64,json=mapFixed64Fixed64" json:"map_fixed64_fixed64,omitempty" protobuf_key:"fixed64,1,opt,name=key" protobuf_val:"fixed64,2,opt,name=value"` + MapSfixed32Sfixed32 map[int32]int32 `protobuf:"bytes,64,rep,name=map_sfixed32_sfixed32,json=mapSfixed32Sfixed32" json:"map_sfixed32_sfixed32,omitempty" protobuf_key:"fixed32,1,opt,name=key" protobuf_val:"fixed32,2,opt,name=value"` + MapSfixed64Sfixed64 map[int64]int64 `protobuf:"bytes,65,rep,name=map_sfixed64_sfixed64,json=mapSfixed64Sfixed64" json:"map_sfixed64_sfixed64,omitempty" protobuf_key:"fixed64,1,opt,name=key" protobuf_val:"fixed64,2,opt,name=value"` + MapInt32Float map[int32]float32 `protobuf:"bytes,66,rep,name=map_int32_float,json=mapInt32Float" json:"map_int32_float,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"fixed32,2,opt,name=value"` + MapInt32Double map[int32]float64 `protobuf:"bytes,67,rep,name=map_int32_double,json=mapInt32Double" json:"map_int32_double,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"fixed64,2,opt,name=value"` + MapBoolBool map[bool]bool `protobuf:"bytes,68,rep,name=map_bool_bool,json=mapBoolBool" json:"map_bool_bool,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + MapStringString map[string]string `protobuf:"bytes,69,rep,name=map_string_string,json=mapStringString" json:"map_string_string,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MapStringBytes map[string][]byte `protobuf:"bytes,70,rep,name=map_string_bytes,json=mapStringBytes" json:"map_string_bytes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value,proto3"` + MapStringNestedMessage map[string]*TestAllTypes_NestedMessage `protobuf:"bytes,71,rep,name=map_string_nested_message,json=mapStringNestedMessage" json:"map_string_nested_message,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MapStringForeignMessage map[string]*ForeignMessage 
`protobuf:"bytes,72,rep,name=map_string_foreign_message,json=mapStringForeignMessage" json:"map_string_foreign_message,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MapStringNestedEnum map[string]TestAllTypes_NestedEnum `protobuf:"bytes,73,rep,name=map_string_nested_enum,json=mapStringNestedEnum" json:"map_string_nested_enum,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=conformance.TestAllTypes_NestedEnum"` + MapStringForeignEnum map[string]ForeignEnum `protobuf:"bytes,74,rep,name=map_string_foreign_enum,json=mapStringForeignEnum" json:"map_string_foreign_enum,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=conformance.ForeignEnum"` + // Types that are valid to be assigned to OneofField: + // *TestAllTypes_OneofUint32 + // *TestAllTypes_OneofNestedMessage + // *TestAllTypes_OneofString + // *TestAllTypes_OneofBytes + // *TestAllTypes_OneofBool + // *TestAllTypes_OneofUint64 + // *TestAllTypes_OneofFloat + // *TestAllTypes_OneofDouble + // *TestAllTypes_OneofEnum + OneofField isTestAllTypes_OneofField `protobuf_oneof:"oneof_field"` + // Well-known types + OptionalBoolWrapper *google_protobuf5.BoolValue `protobuf:"bytes,201,opt,name=optional_bool_wrapper,json=optionalBoolWrapper" json:"optional_bool_wrapper,omitempty"` + OptionalInt32Wrapper *google_protobuf5.Int32Value `protobuf:"bytes,202,opt,name=optional_int32_wrapper,json=optionalInt32Wrapper" json:"optional_int32_wrapper,omitempty"` + OptionalInt64Wrapper *google_protobuf5.Int64Value `protobuf:"bytes,203,opt,name=optional_int64_wrapper,json=optionalInt64Wrapper" json:"optional_int64_wrapper,omitempty"` + OptionalUint32Wrapper *google_protobuf5.UInt32Value `protobuf:"bytes,204,opt,name=optional_uint32_wrapper,json=optionalUint32Wrapper" json:"optional_uint32_wrapper,omitempty"` + OptionalUint64Wrapper *google_protobuf5.UInt64Value `protobuf:"bytes,205,opt,name=optional_uint64_wrapper,json=optionalUint64Wrapper" json:"optional_uint64_wrapper,omitempty"` + OptionalFloatWrapper *google_protobuf5.FloatValue `protobuf:"bytes,206,opt,name=optional_float_wrapper,json=optionalFloatWrapper" json:"optional_float_wrapper,omitempty"` + OptionalDoubleWrapper *google_protobuf5.DoubleValue `protobuf:"bytes,207,opt,name=optional_double_wrapper,json=optionalDoubleWrapper" json:"optional_double_wrapper,omitempty"` + OptionalStringWrapper *google_protobuf5.StringValue `protobuf:"bytes,208,opt,name=optional_string_wrapper,json=optionalStringWrapper" json:"optional_string_wrapper,omitempty"` + OptionalBytesWrapper *google_protobuf5.BytesValue `protobuf:"bytes,209,opt,name=optional_bytes_wrapper,json=optionalBytesWrapper" json:"optional_bytes_wrapper,omitempty"` + RepeatedBoolWrapper []*google_protobuf5.BoolValue `protobuf:"bytes,211,rep,name=repeated_bool_wrapper,json=repeatedBoolWrapper" json:"repeated_bool_wrapper,omitempty"` + RepeatedInt32Wrapper []*google_protobuf5.Int32Value `protobuf:"bytes,212,rep,name=repeated_int32_wrapper,json=repeatedInt32Wrapper" json:"repeated_int32_wrapper,omitempty"` + RepeatedInt64Wrapper []*google_protobuf5.Int64Value `protobuf:"bytes,213,rep,name=repeated_int64_wrapper,json=repeatedInt64Wrapper" json:"repeated_int64_wrapper,omitempty"` + RepeatedUint32Wrapper []*google_protobuf5.UInt32Value `protobuf:"bytes,214,rep,name=repeated_uint32_wrapper,json=repeatedUint32Wrapper" json:"repeated_uint32_wrapper,omitempty"` + RepeatedUint64Wrapper []*google_protobuf5.UInt64Value 
`protobuf:"bytes,215,rep,name=repeated_uint64_wrapper,json=repeatedUint64Wrapper" json:"repeated_uint64_wrapper,omitempty"` + RepeatedFloatWrapper []*google_protobuf5.FloatValue `protobuf:"bytes,216,rep,name=repeated_float_wrapper,json=repeatedFloatWrapper" json:"repeated_float_wrapper,omitempty"` + RepeatedDoubleWrapper []*google_protobuf5.DoubleValue `protobuf:"bytes,217,rep,name=repeated_double_wrapper,json=repeatedDoubleWrapper" json:"repeated_double_wrapper,omitempty"` + RepeatedStringWrapper []*google_protobuf5.StringValue `protobuf:"bytes,218,rep,name=repeated_string_wrapper,json=repeatedStringWrapper" json:"repeated_string_wrapper,omitempty"` + RepeatedBytesWrapper []*google_protobuf5.BytesValue `protobuf:"bytes,219,rep,name=repeated_bytes_wrapper,json=repeatedBytesWrapper" json:"repeated_bytes_wrapper,omitempty"` + OptionalDuration *google_protobuf1.Duration `protobuf:"bytes,301,opt,name=optional_duration,json=optionalDuration" json:"optional_duration,omitempty"` + OptionalTimestamp *google_protobuf4.Timestamp `protobuf:"bytes,302,opt,name=optional_timestamp,json=optionalTimestamp" json:"optional_timestamp,omitempty"` + OptionalFieldMask *google_protobuf2.FieldMask `protobuf:"bytes,303,opt,name=optional_field_mask,json=optionalFieldMask" json:"optional_field_mask,omitempty"` + OptionalStruct *google_protobuf3.Struct `protobuf:"bytes,304,opt,name=optional_struct,json=optionalStruct" json:"optional_struct,omitempty"` + OptionalAny *google_protobuf.Any `protobuf:"bytes,305,opt,name=optional_any,json=optionalAny" json:"optional_any,omitempty"` + OptionalValue *google_protobuf3.Value `protobuf:"bytes,306,opt,name=optional_value,json=optionalValue" json:"optional_value,omitempty"` + RepeatedDuration []*google_protobuf1.Duration `protobuf:"bytes,311,rep,name=repeated_duration,json=repeatedDuration" json:"repeated_duration,omitempty"` + RepeatedTimestamp []*google_protobuf4.Timestamp `protobuf:"bytes,312,rep,name=repeated_timestamp,json=repeatedTimestamp" json:"repeated_timestamp,omitempty"` + RepeatedFieldmask []*google_protobuf2.FieldMask `protobuf:"bytes,313,rep,name=repeated_fieldmask,json=repeatedFieldmask" json:"repeated_fieldmask,omitempty"` + RepeatedStruct []*google_protobuf3.Struct `protobuf:"bytes,324,rep,name=repeated_struct,json=repeatedStruct" json:"repeated_struct,omitempty"` + RepeatedAny []*google_protobuf.Any `protobuf:"bytes,315,rep,name=repeated_any,json=repeatedAny" json:"repeated_any,omitempty"` + RepeatedValue []*google_protobuf3.Value `protobuf:"bytes,316,rep,name=repeated_value,json=repeatedValue" json:"repeated_value,omitempty"` + // Test field-name-to-JSON-name convention. + // (protobuf says names can be any valid C/C++ identifier.) 
+ Fieldname1 int32 `protobuf:"varint,401,opt,name=fieldname1" json:"fieldname1,omitempty"` + FieldName2 int32 `protobuf:"varint,402,opt,name=field_name2,json=fieldName2" json:"field_name2,omitempty"` + XFieldName3 int32 `protobuf:"varint,403,opt,name=_field_name3,json=FieldName3" json:"_field_name3,omitempty"` + Field_Name4_ int32 `protobuf:"varint,404,opt,name=field__name4_,json=fieldName4" json:"field__name4_,omitempty"` + Field0Name5 int32 `protobuf:"varint,405,opt,name=field0name5" json:"field0name5,omitempty"` + Field_0Name6 int32 `protobuf:"varint,406,opt,name=field_0_name6,json=field0Name6" json:"field_0_name6,omitempty"` + FieldName7 int32 `protobuf:"varint,407,opt,name=fieldName7" json:"fieldName7,omitempty"` + FieldName8 int32 `protobuf:"varint,408,opt,name=FieldName8" json:"FieldName8,omitempty"` + Field_Name9 int32 `protobuf:"varint,409,opt,name=field_Name9,json=fieldName9" json:"field_Name9,omitempty"` + Field_Name10 int32 `protobuf:"varint,410,opt,name=Field_Name10,json=FieldName10" json:"Field_Name10,omitempty"` + FIELD_NAME11 int32 `protobuf:"varint,411,opt,name=FIELD_NAME11,json=FIELDNAME11" json:"FIELD_NAME11,omitempty"` + FIELDName12 int32 `protobuf:"varint,412,opt,name=FIELD_name12,json=FIELDName12" json:"FIELD_name12,omitempty"` + XFieldName13 int32 `protobuf:"varint,413,opt,name=__field_name13,json=FieldName13" json:"__field_name13,omitempty"` + X_FieldName14 int32 `protobuf:"varint,414,opt,name=__Field_name14,json=FieldName14" json:"__Field_name14,omitempty"` + Field_Name15 int32 `protobuf:"varint,415,opt,name=field__name15,json=fieldName15" json:"field__name15,omitempty"` + Field__Name16 int32 `protobuf:"varint,416,opt,name=field__Name16,json=fieldName16" json:"field__Name16,omitempty"` + FieldName17__ int32 `protobuf:"varint,417,opt,name=field_name17__,json=fieldName17" json:"field_name17__,omitempty"` + FieldName18__ int32 `protobuf:"varint,418,opt,name=Field_name18__,json=FieldName18" json:"Field_name18__,omitempty"` +} + +func (m *TestAllTypes) Reset() { *m = TestAllTypes{} } +func (m *TestAllTypes) String() string { return proto.CompactTextString(m) } +func (*TestAllTypes) ProtoMessage() {} +func (*TestAllTypes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +type isTestAllTypes_OneofField interface { + isTestAllTypes_OneofField() +} + +type TestAllTypes_OneofUint32 struct { + OneofUint32 uint32 `protobuf:"varint,111,opt,name=oneof_uint32,json=oneofUint32,oneof"` +} +type TestAllTypes_OneofNestedMessage struct { + OneofNestedMessage *TestAllTypes_NestedMessage `protobuf:"bytes,112,opt,name=oneof_nested_message,json=oneofNestedMessage,oneof"` +} +type TestAllTypes_OneofString struct { + OneofString string `protobuf:"bytes,113,opt,name=oneof_string,json=oneofString,oneof"` +} +type TestAllTypes_OneofBytes struct { + OneofBytes []byte `protobuf:"bytes,114,opt,name=oneof_bytes,json=oneofBytes,proto3,oneof"` +} +type TestAllTypes_OneofBool struct { + OneofBool bool `protobuf:"varint,115,opt,name=oneof_bool,json=oneofBool,oneof"` +} +type TestAllTypes_OneofUint64 struct { + OneofUint64 uint64 `protobuf:"varint,116,opt,name=oneof_uint64,json=oneofUint64,oneof"` +} +type TestAllTypes_OneofFloat struct { + OneofFloat float32 `protobuf:"fixed32,117,opt,name=oneof_float,json=oneofFloat,oneof"` +} +type TestAllTypes_OneofDouble struct { + OneofDouble float64 `protobuf:"fixed64,118,opt,name=oneof_double,json=oneofDouble,oneof"` +} +type TestAllTypes_OneofEnum struct { + OneofEnum TestAllTypes_NestedEnum 
`protobuf:"varint,119,opt,name=oneof_enum,json=oneofEnum,enum=conformance.TestAllTypes_NestedEnum,oneof"` +} + +func (*TestAllTypes_OneofUint32) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofNestedMessage) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofString) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofBytes) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofBool) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofUint64) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofFloat) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofDouble) isTestAllTypes_OneofField() {} +func (*TestAllTypes_OneofEnum) isTestAllTypes_OneofField() {} + +func (m *TestAllTypes) GetOneofField() isTestAllTypes_OneofField { + if m != nil { + return m.OneofField + } + return nil +} + +func (m *TestAllTypes) GetOptionalInt32() int32 { + if m != nil { + return m.OptionalInt32 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalInt64() int64 { + if m != nil { + return m.OptionalInt64 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalUint32() uint32 { + if m != nil { + return m.OptionalUint32 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalUint64() uint64 { + if m != nil { + return m.OptionalUint64 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalSint32() int32 { + if m != nil { + return m.OptionalSint32 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalSint64() int64 { + if m != nil { + return m.OptionalSint64 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalFixed32() uint32 { + if m != nil { + return m.OptionalFixed32 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalFixed64() uint64 { + if m != nil { + return m.OptionalFixed64 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalSfixed32() int32 { + if m != nil { + return m.OptionalSfixed32 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalSfixed64() int64 { + if m != nil { + return m.OptionalSfixed64 + } + return 0 +} + +func (m *TestAllTypes) GetOptionalFloat() float32 { + if m != nil { + return m.OptionalFloat + } + return 0 +} + +func (m *TestAllTypes) GetOptionalDouble() float64 { + if m != nil { + return m.OptionalDouble + } + return 0 +} + +func (m *TestAllTypes) GetOptionalBool() bool { + if m != nil { + return m.OptionalBool + } + return false +} + +func (m *TestAllTypes) GetOptionalString() string { + if m != nil { + return m.OptionalString + } + return "" +} + +func (m *TestAllTypes) GetOptionalBytes() []byte { + if m != nil { + return m.OptionalBytes + } + return nil +} + +func (m *TestAllTypes) GetOptionalNestedMessage() *TestAllTypes_NestedMessage { + if m != nil { + return m.OptionalNestedMessage + } + return nil +} + +func (m *TestAllTypes) GetOptionalForeignMessage() *ForeignMessage { + if m != nil { + return m.OptionalForeignMessage + } + return nil +} + +func (m *TestAllTypes) GetOptionalNestedEnum() TestAllTypes_NestedEnum { + if m != nil { + return m.OptionalNestedEnum + } + return TestAllTypes_FOO +} + +func (m *TestAllTypes) GetOptionalForeignEnum() ForeignEnum { + if m != nil { + return m.OptionalForeignEnum + } + return ForeignEnum_FOREIGN_FOO +} + +func (m *TestAllTypes) GetOptionalStringPiece() string { + if m != nil { + return m.OptionalStringPiece + } + return "" +} + +func (m *TestAllTypes) GetOptionalCord() string { + if m != nil { + return m.OptionalCord + } + return "" +} + +func (m *TestAllTypes) GetRecursiveMessage() *TestAllTypes { + if m != nil { + return m.RecursiveMessage + } + return nil +} + +func (m *TestAllTypes) 
GetRepeatedInt32() []int32 { + if m != nil { + return m.RepeatedInt32 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedInt64() []int64 { + if m != nil { + return m.RepeatedInt64 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedUint32() []uint32 { + if m != nil { + return m.RepeatedUint32 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedUint64() []uint64 { + if m != nil { + return m.RepeatedUint64 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedSint32() []int32 { + if m != nil { + return m.RepeatedSint32 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedSint64() []int64 { + if m != nil { + return m.RepeatedSint64 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedFixed32() []uint32 { + if m != nil { + return m.RepeatedFixed32 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedFixed64() []uint64 { + if m != nil { + return m.RepeatedFixed64 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedSfixed32() []int32 { + if m != nil { + return m.RepeatedSfixed32 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedSfixed64() []int64 { + if m != nil { + return m.RepeatedSfixed64 + } + return nil +} + +func (m *TestAllTypes) GetRepeatedFloat() []float32 { + if m != nil { + return m.RepeatedFloat + } + return nil +} + +func (m *TestAllTypes) GetRepeatedDouble() []float64 { + if m != nil { + return m.RepeatedDouble + } + return nil +} + +func (m *TestAllTypes) GetRepeatedBool() []bool { + if m != nil { + return m.RepeatedBool + } + return nil +} + +func (m *TestAllTypes) GetRepeatedString() []string { + if m != nil { + return m.RepeatedString + } + return nil +} + +func (m *TestAllTypes) GetRepeatedBytes() [][]byte { + if m != nil { + return m.RepeatedBytes + } + return nil +} + +func (m *TestAllTypes) GetRepeatedNestedMessage() []*TestAllTypes_NestedMessage { + if m != nil { + return m.RepeatedNestedMessage + } + return nil +} + +func (m *TestAllTypes) GetRepeatedForeignMessage() []*ForeignMessage { + if m != nil { + return m.RepeatedForeignMessage + } + return nil +} + +func (m *TestAllTypes) GetRepeatedNestedEnum() []TestAllTypes_NestedEnum { + if m != nil { + return m.RepeatedNestedEnum + } + return nil +} + +func (m *TestAllTypes) GetRepeatedForeignEnum() []ForeignEnum { + if m != nil { + return m.RepeatedForeignEnum + } + return nil +} + +func (m *TestAllTypes) GetRepeatedStringPiece() []string { + if m != nil { + return m.RepeatedStringPiece + } + return nil +} + +func (m *TestAllTypes) GetRepeatedCord() []string { + if m != nil { + return m.RepeatedCord + } + return nil +} + +func (m *TestAllTypes) GetMapInt32Int32() map[int32]int32 { + if m != nil { + return m.MapInt32Int32 + } + return nil +} + +func (m *TestAllTypes) GetMapInt64Int64() map[int64]int64 { + if m != nil { + return m.MapInt64Int64 + } + return nil +} + +func (m *TestAllTypes) GetMapUint32Uint32() map[uint32]uint32 { + if m != nil { + return m.MapUint32Uint32 + } + return nil +} + +func (m *TestAllTypes) GetMapUint64Uint64() map[uint64]uint64 { + if m != nil { + return m.MapUint64Uint64 + } + return nil +} + +func (m *TestAllTypes) GetMapSint32Sint32() map[int32]int32 { + if m != nil { + return m.MapSint32Sint32 + } + return nil +} + +func (m *TestAllTypes) GetMapSint64Sint64() map[int64]int64 { + if m != nil { + return m.MapSint64Sint64 + } + return nil +} + +func (m *TestAllTypes) GetMapFixed32Fixed32() map[uint32]uint32 { + if m != nil { + return m.MapFixed32Fixed32 + } + return nil +} + +func (m *TestAllTypes) GetMapFixed64Fixed64() map[uint64]uint64 { + if m != nil 
{ + return m.MapFixed64Fixed64 + } + return nil +} + +func (m *TestAllTypes) GetMapSfixed32Sfixed32() map[int32]int32 { + if m != nil { + return m.MapSfixed32Sfixed32 + } + return nil +} + +func (m *TestAllTypes) GetMapSfixed64Sfixed64() map[int64]int64 { + if m != nil { + return m.MapSfixed64Sfixed64 + } + return nil +} + +func (m *TestAllTypes) GetMapInt32Float() map[int32]float32 { + if m != nil { + return m.MapInt32Float + } + return nil +} + +func (m *TestAllTypes) GetMapInt32Double() map[int32]float64 { + if m != nil { + return m.MapInt32Double + } + return nil +} + +func (m *TestAllTypes) GetMapBoolBool() map[bool]bool { + if m != nil { + return m.MapBoolBool + } + return nil +} + +func (m *TestAllTypes) GetMapStringString() map[string]string { + if m != nil { + return m.MapStringString + } + return nil +} + +func (m *TestAllTypes) GetMapStringBytes() map[string][]byte { + if m != nil { + return m.MapStringBytes + } + return nil +} + +func (m *TestAllTypes) GetMapStringNestedMessage() map[string]*TestAllTypes_NestedMessage { + if m != nil { + return m.MapStringNestedMessage + } + return nil +} + +func (m *TestAllTypes) GetMapStringForeignMessage() map[string]*ForeignMessage { + if m != nil { + return m.MapStringForeignMessage + } + return nil +} + +func (m *TestAllTypes) GetMapStringNestedEnum() map[string]TestAllTypes_NestedEnum { + if m != nil { + return m.MapStringNestedEnum + } + return nil +} + +func (m *TestAllTypes) GetMapStringForeignEnum() map[string]ForeignEnum { + if m != nil { + return m.MapStringForeignEnum + } + return nil +} + +func (m *TestAllTypes) GetOneofUint32() uint32 { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofUint32); ok { + return x.OneofUint32 + } + return 0 +} + +func (m *TestAllTypes) GetOneofNestedMessage() *TestAllTypes_NestedMessage { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofNestedMessage); ok { + return x.OneofNestedMessage + } + return nil +} + +func (m *TestAllTypes) GetOneofString() string { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofString); ok { + return x.OneofString + } + return "" +} + +func (m *TestAllTypes) GetOneofBytes() []byte { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofBytes); ok { + return x.OneofBytes + } + return nil +} + +func (m *TestAllTypes) GetOneofBool() bool { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofBool); ok { + return x.OneofBool + } + return false +} + +func (m *TestAllTypes) GetOneofUint64() uint64 { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofUint64); ok { + return x.OneofUint64 + } + return 0 +} + +func (m *TestAllTypes) GetOneofFloat() float32 { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofFloat); ok { + return x.OneofFloat + } + return 0 +} + +func (m *TestAllTypes) GetOneofDouble() float64 { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofDouble); ok { + return x.OneofDouble + } + return 0 +} + +func (m *TestAllTypes) GetOneofEnum() TestAllTypes_NestedEnum { + if x, ok := m.GetOneofField().(*TestAllTypes_OneofEnum); ok { + return x.OneofEnum + } + return TestAllTypes_FOO +} + +func (m *TestAllTypes) GetOptionalBoolWrapper() *google_protobuf5.BoolValue { + if m != nil { + return m.OptionalBoolWrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalInt32Wrapper() *google_protobuf5.Int32Value { + if m != nil { + return m.OptionalInt32Wrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalInt64Wrapper() *google_protobuf5.Int64Value { + if m != nil { + return m.OptionalInt64Wrapper + } + return nil +} + +func (m *TestAllTypes) 
GetOptionalUint32Wrapper() *google_protobuf5.UInt32Value { + if m != nil { + return m.OptionalUint32Wrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalUint64Wrapper() *google_protobuf5.UInt64Value { + if m != nil { + return m.OptionalUint64Wrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalFloatWrapper() *google_protobuf5.FloatValue { + if m != nil { + return m.OptionalFloatWrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalDoubleWrapper() *google_protobuf5.DoubleValue { + if m != nil { + return m.OptionalDoubleWrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalStringWrapper() *google_protobuf5.StringValue { + if m != nil { + return m.OptionalStringWrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalBytesWrapper() *google_protobuf5.BytesValue { + if m != nil { + return m.OptionalBytesWrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedBoolWrapper() []*google_protobuf5.BoolValue { + if m != nil { + return m.RepeatedBoolWrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedInt32Wrapper() []*google_protobuf5.Int32Value { + if m != nil { + return m.RepeatedInt32Wrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedInt64Wrapper() []*google_protobuf5.Int64Value { + if m != nil { + return m.RepeatedInt64Wrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedUint32Wrapper() []*google_protobuf5.UInt32Value { + if m != nil { + return m.RepeatedUint32Wrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedUint64Wrapper() []*google_protobuf5.UInt64Value { + if m != nil { + return m.RepeatedUint64Wrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedFloatWrapper() []*google_protobuf5.FloatValue { + if m != nil { + return m.RepeatedFloatWrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedDoubleWrapper() []*google_protobuf5.DoubleValue { + if m != nil { + return m.RepeatedDoubleWrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedStringWrapper() []*google_protobuf5.StringValue { + if m != nil { + return m.RepeatedStringWrapper + } + return nil +} + +func (m *TestAllTypes) GetRepeatedBytesWrapper() []*google_protobuf5.BytesValue { + if m != nil { + return m.RepeatedBytesWrapper + } + return nil +} + +func (m *TestAllTypes) GetOptionalDuration() *google_protobuf1.Duration { + if m != nil { + return m.OptionalDuration + } + return nil +} + +func (m *TestAllTypes) GetOptionalTimestamp() *google_protobuf4.Timestamp { + if m != nil { + return m.OptionalTimestamp + } + return nil +} + +func (m *TestAllTypes) GetOptionalFieldMask() *google_protobuf2.FieldMask { + if m != nil { + return m.OptionalFieldMask + } + return nil +} + +func (m *TestAllTypes) GetOptionalStruct() *google_protobuf3.Struct { + if m != nil { + return m.OptionalStruct + } + return nil +} + +func (m *TestAllTypes) GetOptionalAny() *google_protobuf.Any { + if m != nil { + return m.OptionalAny + } + return nil +} + +func (m *TestAllTypes) GetOptionalValue() *google_protobuf3.Value { + if m != nil { + return m.OptionalValue + } + return nil +} + +func (m *TestAllTypes) GetRepeatedDuration() []*google_protobuf1.Duration { + if m != nil { + return m.RepeatedDuration + } + return nil +} + +func (m *TestAllTypes) GetRepeatedTimestamp() []*google_protobuf4.Timestamp { + if m != nil { + return m.RepeatedTimestamp + } + return nil +} + +func (m *TestAllTypes) GetRepeatedFieldmask() []*google_protobuf2.FieldMask { + if m != nil { + return m.RepeatedFieldmask + } + return nil +} + +func (m 
*TestAllTypes) GetRepeatedStruct() []*google_protobuf3.Struct { + if m != nil { + return m.RepeatedStruct + } + return nil +} + +func (m *TestAllTypes) GetRepeatedAny() []*google_protobuf.Any { + if m != nil { + return m.RepeatedAny + } + return nil +} + +func (m *TestAllTypes) GetRepeatedValue() []*google_protobuf3.Value { + if m != nil { + return m.RepeatedValue + } + return nil +} + +func (m *TestAllTypes) GetFieldname1() int32 { + if m != nil { + return m.Fieldname1 + } + return 0 +} + +func (m *TestAllTypes) GetFieldName2() int32 { + if m != nil { + return m.FieldName2 + } + return 0 +} + +func (m *TestAllTypes) GetXFieldName3() int32 { + if m != nil { + return m.XFieldName3 + } + return 0 +} + +func (m *TestAllTypes) GetField_Name4_() int32 { + if m != nil { + return m.Field_Name4_ + } + return 0 +} + +func (m *TestAllTypes) GetField0Name5() int32 { + if m != nil { + return m.Field0Name5 + } + return 0 +} + +func (m *TestAllTypes) GetField_0Name6() int32 { + if m != nil { + return m.Field_0Name6 + } + return 0 +} + +func (m *TestAllTypes) GetFieldName7() int32 { + if m != nil { + return m.FieldName7 + } + return 0 +} + +func (m *TestAllTypes) GetFieldName8() int32 { + if m != nil { + return m.FieldName8 + } + return 0 +} + +func (m *TestAllTypes) GetField_Name9() int32 { + if m != nil { + return m.Field_Name9 + } + return 0 +} + +func (m *TestAllTypes) GetField_Name10() int32 { + if m != nil { + return m.Field_Name10 + } + return 0 +} + +func (m *TestAllTypes) GetFIELD_NAME11() int32 { + if m != nil { + return m.FIELD_NAME11 + } + return 0 +} + +func (m *TestAllTypes) GetFIELDName12() int32 { + if m != nil { + return m.FIELDName12 + } + return 0 +} + +func (m *TestAllTypes) GetXFieldName13() int32 { + if m != nil { + return m.XFieldName13 + } + return 0 +} + +func (m *TestAllTypes) GetX_FieldName14() int32 { + if m != nil { + return m.X_FieldName14 + } + return 0 +} + +func (m *TestAllTypes) GetField_Name15() int32 { + if m != nil { + return m.Field_Name15 + } + return 0 +} + +func (m *TestAllTypes) GetField__Name16() int32 { + if m != nil { + return m.Field__Name16 + } + return 0 +} + +func (m *TestAllTypes) GetFieldName17__() int32 { + if m != nil { + return m.FieldName17__ + } + return 0 +} + +func (m *TestAllTypes) GetFieldName18__() int32 { + if m != nil { + return m.FieldName18__ + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TestAllTypes) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TestAllTypes_OneofMarshaler, _TestAllTypes_OneofUnmarshaler, _TestAllTypes_OneofSizer, []interface{}{ + (*TestAllTypes_OneofUint32)(nil), + (*TestAllTypes_OneofNestedMessage)(nil), + (*TestAllTypes_OneofString)(nil), + (*TestAllTypes_OneofBytes)(nil), + (*TestAllTypes_OneofBool)(nil), + (*TestAllTypes_OneofUint64)(nil), + (*TestAllTypes_OneofFloat)(nil), + (*TestAllTypes_OneofDouble)(nil), + (*TestAllTypes_OneofEnum)(nil), + } +} + +func _TestAllTypes_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TestAllTypes) + // oneof_field + switch x := m.OneofField.(type) { + case *TestAllTypes_OneofUint32: + b.EncodeVarint(111<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.OneofUint32)) + case *TestAllTypes_OneofNestedMessage: + b.EncodeVarint(112<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OneofNestedMessage); err != nil { + return err + } + case *TestAllTypes_OneofString: + b.EncodeVarint(113<<3 | proto.WireBytes) + b.EncodeStringBytes(x.OneofString) + case *TestAllTypes_OneofBytes: + b.EncodeVarint(114<<3 | proto.WireBytes) + b.EncodeRawBytes(x.OneofBytes) + case *TestAllTypes_OneofBool: + t := uint64(0) + if x.OneofBool { + t = 1 + } + b.EncodeVarint(115<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *TestAllTypes_OneofUint64: + b.EncodeVarint(116<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.OneofUint64)) + case *TestAllTypes_OneofFloat: + b.EncodeVarint(117<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(math.Float32bits(x.OneofFloat))) + case *TestAllTypes_OneofDouble: + b.EncodeVarint(118<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.OneofDouble)) + case *TestAllTypes_OneofEnum: + b.EncodeVarint(119<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.OneofEnum)) + case nil: + default: + return fmt.Errorf("TestAllTypes.OneofField has unexpected type %T", x) + } + return nil +} + +func _TestAllTypes_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TestAllTypes) + switch tag { + case 111: // oneof_field.oneof_uint32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.OneofField = &TestAllTypes_OneofUint32{uint32(x)} + return true, err + case 112: // oneof_field.oneof_nested_message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TestAllTypes_NestedMessage) + err := b.DecodeMessage(msg) + m.OneofField = &TestAllTypes_OneofNestedMessage{msg} + return true, err + case 113: // oneof_field.oneof_string + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.OneofField = &TestAllTypes_OneofString{x} + return true, err + case 114: // oneof_field.oneof_bytes + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.OneofField = &TestAllTypes_OneofBytes{x} + return true, err + case 115: // oneof_field.oneof_bool + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.OneofField = &TestAllTypes_OneofBool{x != 0} + return true, err + case 116: // oneof_field.oneof_uint64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.OneofField = 
&TestAllTypes_OneofUint64{x} + return true, err + case 117: // oneof_field.oneof_float + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.OneofField = &TestAllTypes_OneofFloat{math.Float32frombits(uint32(x))} + return true, err + case 118: // oneof_field.oneof_double + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.OneofField = &TestAllTypes_OneofDouble{math.Float64frombits(x)} + return true, err + case 119: // oneof_field.oneof_enum + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.OneofField = &TestAllTypes_OneofEnum{TestAllTypes_NestedEnum(x)} + return true, err + default: + return false, nil + } +} + +func _TestAllTypes_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TestAllTypes) + // oneof_field + switch x := m.OneofField.(type) { + case *TestAllTypes_OneofUint32: + n += proto.SizeVarint(111<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.OneofUint32)) + case *TestAllTypes_OneofNestedMessage: + s := proto.Size(x.OneofNestedMessage) + n += proto.SizeVarint(112<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *TestAllTypes_OneofString: + n += proto.SizeVarint(113<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.OneofString))) + n += len(x.OneofString) + case *TestAllTypes_OneofBytes: + n += proto.SizeVarint(114<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.OneofBytes))) + n += len(x.OneofBytes) + case *TestAllTypes_OneofBool: + n += proto.SizeVarint(115<<3 | proto.WireVarint) + n += 1 + case *TestAllTypes_OneofUint64: + n += proto.SizeVarint(116<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.OneofUint64)) + case *TestAllTypes_OneofFloat: + n += proto.SizeVarint(117<<3 | proto.WireFixed32) + n += 4 + case *TestAllTypes_OneofDouble: + n += proto.SizeVarint(118<<3 | proto.WireFixed64) + n += 8 + case *TestAllTypes_OneofEnum: + n += proto.SizeVarint(119<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.OneofEnum)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type TestAllTypes_NestedMessage struct { + A int32 `protobuf:"varint,1,opt,name=a" json:"a,omitempty"` + Corecursive *TestAllTypes `protobuf:"bytes,2,opt,name=corecursive" json:"corecursive,omitempty"` +} + +func (m *TestAllTypes_NestedMessage) Reset() { *m = TestAllTypes_NestedMessage{} } +func (m *TestAllTypes_NestedMessage) String() string { return proto.CompactTextString(m) } +func (*TestAllTypes_NestedMessage) ProtoMessage() {} +func (*TestAllTypes_NestedMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } + +func (m *TestAllTypes_NestedMessage) GetA() int32 { + if m != nil { + return m.A + } + return 0 +} + +func (m *TestAllTypes_NestedMessage) GetCorecursive() *TestAllTypes { + if m != nil { + return m.Corecursive + } + return nil +} + +type ForeignMessage struct { + C int32 `protobuf:"varint,1,opt,name=c" json:"c,omitempty"` +} + +func (m *ForeignMessage) Reset() { *m = ForeignMessage{} } +func (m *ForeignMessage) String() string { return proto.CompactTextString(m) } +func (*ForeignMessage) ProtoMessage() {} +func (*ForeignMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *ForeignMessage) GetC() int32 { + if m != nil { + return m.C + } + return 0 +} + +func init() { + proto.RegisterType((*ConformanceRequest)(nil), 
"conformance.ConformanceRequest") + proto.RegisterType((*ConformanceResponse)(nil), "conformance.ConformanceResponse") + proto.RegisterType((*TestAllTypes)(nil), "conformance.TestAllTypes") + proto.RegisterType((*TestAllTypes_NestedMessage)(nil), "conformance.TestAllTypes.NestedMessage") + proto.RegisterType((*ForeignMessage)(nil), "conformance.ForeignMessage") + proto.RegisterEnum("conformance.WireFormat", WireFormat_name, WireFormat_value) + proto.RegisterEnum("conformance.ForeignEnum", ForeignEnum_name, ForeignEnum_value) + proto.RegisterEnum("conformance.TestAllTypes_NestedEnum", TestAllTypes_NestedEnum_name, TestAllTypes_NestedEnum_value) +} + +func init() { proto.RegisterFile("conformance_proto/conformance.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 2737 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5a, 0xd9, 0x72, 0xdb, 0xc8, + 0xd5, 0x16, 0x08, 0x59, 0x4b, 0x93, 0x92, 0xa8, 0xd6, 0xd6, 0x96, 0x5d, 0x63, 0x58, 0xb2, 0x7f, + 0xd3, 0xf6, 0x8c, 0xac, 0x05, 0x86, 0x65, 0xcf, 0x3f, 0x8e, 0x45, 0x9b, 0xb4, 0xe4, 0x8c, 0x25, + 0x17, 0x64, 0x8d, 0xab, 0x9c, 0x0b, 0x06, 0xa6, 0x20, 0x15, 0xc7, 0x24, 0xc1, 0x01, 0x48, 0x4f, + 0x94, 0xcb, 0xbc, 0x41, 0xf6, 0x7d, 0xbd, 0xcf, 0x7a, 0x93, 0xa4, 0x92, 0xab, 0x54, 0x6e, 0xb2, + 0x27, 0x95, 0x3d, 0x79, 0x85, 0xbc, 0x43, 0x52, 0xbd, 0xa2, 0xbb, 0x01, 0x50, 0xf4, 0x54, 0x0d, + 0x25, 0x1e, 0x7c, 0xfd, 0x9d, 0xd3, 0xe7, 0x1c, 0x7c, 0x2d, 0x1c, 0x18, 0x2c, 0xd7, 0x83, 0xf6, + 0x51, 0x10, 0xb6, 0xbc, 0x76, 0xdd, 0xaf, 0x75, 0xc2, 0xa0, 0x1b, 0xdc, 0x90, 0x2c, 0x2b, 0xc4, + 0x02, 0xf3, 0x92, 0x69, 0xf1, 0xec, 0x71, 0x10, 0x1c, 0x37, 0xfd, 0x1b, 0xe4, 0xd2, 0x8b, 0xde, + 0xd1, 0x0d, 0xaf, 0x7d, 0x42, 0x71, 0x8b, 0x6f, 0xe8, 0x97, 0x0e, 0x7b, 0xa1, 0xd7, 0x6d, 0x04, + 0x6d, 0x76, 0xdd, 0xd2, 0xaf, 0x1f, 0x35, 0xfc, 0xe6, 0x61, 0xad, 0xe5, 0x45, 0x2f, 0x19, 0xe2, + 0xbc, 0x8e, 0x88, 0xba, 0x61, 0xaf, 0xde, 0x65, 0x57, 0x2f, 0xe8, 0x57, 0xbb, 0x8d, 0x96, 0x1f, + 0x75, 0xbd, 0x56, 0x27, 0x2b, 0x80, 0x0f, 0x43, 0xaf, 0xd3, 0xf1, 0xc3, 0x88, 0x5e, 0x5f, 0xfa, + 0x85, 0x01, 0xe0, 0xfd, 0x78, 0x2f, 0xae, 0xff, 0x41, 0xcf, 0x8f, 0xba, 0xf0, 0x3a, 0x28, 0xf2, + 0x15, 0xb5, 0x8e, 0x77, 0xd2, 0x0c, 0xbc, 0x43, 0x64, 0x58, 0x46, 0xa9, 0xb0, 0x3d, 0xe4, 0x4e, + 0xf1, 0x2b, 0x4f, 0xe8, 0x05, 0xb8, 0x0c, 0x0a, 0xef, 0x47, 0x41, 0x5b, 0x00, 0x73, 0x96, 0x51, + 0x1a, 0xdf, 0x1e, 0x72, 0xf3, 0xd8, 0xca, 0x41, 0x7b, 0x60, 0x21, 0xa4, 0xe4, 0xfe, 0x61, 0x2d, + 0xe8, 0x75, 0x3b, 0xbd, 0x6e, 0x8d, 0x78, 0xed, 0x22, 0xd3, 0x32, 0x4a, 0x93, 0xeb, 0x0b, 0x2b, + 0x72, 0x9a, 0x9f, 0x35, 0x42, 0xbf, 0x4a, 0x2e, 0xbb, 0x73, 0x62, 0xdd, 0x1e, 0x59, 0x46, 0xcd, + 0xe5, 0x71, 0x30, 0xca, 0x1c, 0x2e, 0x7d, 0x2a, 0x07, 0x66, 0x94, 0x4d, 0x44, 0x9d, 0xa0, 0x1d, + 0xf9, 0xf0, 0x22, 0xc8, 0x77, 0xbc, 0x30, 0xf2, 0x6b, 0x7e, 0x18, 0x06, 0x21, 0xd9, 0x00, 0x8e, + 0x0b, 0x10, 0x63, 0x05, 0xdb, 0xe0, 0x55, 0x30, 0x15, 0xf9, 0x61, 0xc3, 0x6b, 0x36, 0x3e, 0xc9, + 0x61, 0x23, 0x0c, 0x36, 0x29, 0x2e, 0x50, 0xe8, 0x65, 0x30, 0x11, 0xf6, 0xda, 0x38, 0xc1, 0x0c, + 0xc8, 0xf7, 0x59, 0x60, 0x66, 0x0a, 0x4b, 0x4b, 0x9d, 0x39, 0x68, 0xea, 0x86, 0xd3, 0x52, 0xb7, + 0x08, 0x46, 0xa3, 0x97, 0x8d, 0x4e, 0xc7, 0x3f, 0x44, 0x67, 0xd8, 0x75, 0x6e, 0x28, 0x8f, 0x81, + 0x91, 0xd0, 0x8f, 0x7a, 0xcd, 0xee, 0xd2, 0x7f, 0xaa, 0xa0, 0xf0, 0xd4, 0x8f, 0xba, 0x5b, 0xcd, + 0xe6, 0xd3, 0x93, 0x8e, 0x1f, 0xc1, 0xcb, 0x60, 0x32, 0xe8, 0xe0, 0x5e, 0xf3, 0x9a, 0xb5, 0x46, + 0xbb, 0xbb, 0xb1, 0x4e, 0x12, 0x70, 0xc6, 0x9d, 0xe0, 0xd6, 0x1d, 0x6c, 
0xd4, 0x61, 0x8e, 0x4d, + 0xf6, 0x65, 0x2a, 0x30, 0xc7, 0x86, 0x57, 0xc0, 0x94, 0x80, 0xf5, 0x28, 0x1d, 0xde, 0xd5, 0x84, + 0x2b, 0x56, 0x1f, 0x10, 0x6b, 0x02, 0xe8, 0xd8, 0x64, 0x57, 0xc3, 0x2a, 0x50, 0x63, 0x8c, 0x28, + 0x23, 0xde, 0xde, 0x74, 0x0c, 0xdc, 0x4f, 0x32, 0x46, 0x94, 0x11, 0xd7, 0x08, 0xaa, 0x40, 0xc7, + 0x86, 0x57, 0x41, 0x51, 0x00, 0x8f, 0x1a, 0x9f, 0xf0, 0x0f, 0x37, 0xd6, 0xd1, 0xa8, 0x65, 0x94, + 0x46, 0x5d, 0x41, 0x50, 0xa5, 0xe6, 0x24, 0xd4, 0xb1, 0xd1, 0x98, 0x65, 0x94, 0x46, 0x34, 0xa8, + 0x63, 0xc3, 0xeb, 0x60, 0x3a, 0x76, 0xcf, 0x69, 0xc7, 0x2d, 0xa3, 0x34, 0xe5, 0x0a, 0x8e, 0x7d, + 0x66, 0x4f, 0x01, 0x3b, 0x36, 0x02, 0x96, 0x51, 0x2a, 0xea, 0x60, 0xc7, 0x56, 0x52, 0x7f, 0xd4, + 0x0c, 0xbc, 0x2e, 0xca, 0x5b, 0x46, 0x29, 0x17, 0xa7, 0xbe, 0x8a, 0x8d, 0xca, 0xfe, 0x0f, 0x83, + 0xde, 0x8b, 0xa6, 0x8f, 0x0a, 0x96, 0x51, 0x32, 0xe2, 0xfd, 0x3f, 0x20, 0x56, 0xb8, 0x0c, 0xc4, + 0xca, 0xda, 0x8b, 0x20, 0x68, 0xa2, 0x09, 0xcb, 0x28, 0x8d, 0xb9, 0x05, 0x6e, 0x2c, 0x07, 0x41, + 0x53, 0xcd, 0x66, 0x37, 0x6c, 0xb4, 0x8f, 0xd1, 0x24, 0xee, 0x2a, 0x29, 0x9b, 0xc4, 0xaa, 0x44, + 0xf7, 0xe2, 0xa4, 0xeb, 0x47, 0x68, 0x0a, 0xb7, 0x71, 0x1c, 0x5d, 0x19, 0x1b, 0x61, 0x0d, 0x2c, + 0x08, 0x58, 0x9b, 0xde, 0xde, 0x2d, 0x3f, 0x8a, 0xbc, 0x63, 0x1f, 0x41, 0xcb, 0x28, 0xe5, 0xd7, + 0xaf, 0x28, 0x37, 0xb6, 0xdc, 0xa2, 0x2b, 0xbb, 0x04, 0xff, 0x98, 0xc2, 0xdd, 0x39, 0xce, 0xa3, + 0x98, 0xe1, 0x01, 0x40, 0x71, 0x96, 0x82, 0xd0, 0x6f, 0x1c, 0xb7, 0x85, 0x87, 0x19, 0xe2, 0xe1, + 0x9c, 0xe2, 0xa1, 0x4a, 0x31, 0x9c, 0x75, 0x5e, 0x24, 0x53, 0xb1, 0xc3, 0xf7, 0xc0, 0xac, 0x1e, + 0xb7, 0xdf, 0xee, 0xb5, 0xd0, 0x1c, 0x51, 0xa3, 0x4b, 0xa7, 0x05, 0x5d, 0x69, 0xf7, 0x5a, 0x2e, + 0x54, 0x23, 0xc6, 0x36, 0xf8, 0x2e, 0x98, 0x4b, 0x84, 0x4b, 0x88, 0xe7, 0x09, 0x31, 0x4a, 0x8b, + 0x95, 0x90, 0xcd, 0x68, 0x81, 0x12, 0x36, 0x47, 0x62, 0xa3, 0xd5, 0xaa, 0x75, 0x1a, 0x7e, 0xdd, + 0x47, 0x08, 0xd7, 0xac, 0x9c, 0x1b, 0xcb, 0xc5, 0xeb, 0x68, 0xdd, 0x9e, 0xe0, 0xcb, 0xf0, 0x8a, + 0xd4, 0x0a, 0xf5, 0x20, 0x3c, 0x44, 0x67, 0x19, 0xde, 0x88, 0xdb, 0xe1, 0x7e, 0x10, 0x1e, 0xc2, + 0x2a, 0x98, 0x0e, 0xfd, 0x7a, 0x2f, 0x8c, 0x1a, 0xaf, 0x7c, 0x91, 0xd6, 0x73, 0x24, 0xad, 0x67, + 0x33, 0x73, 0xe0, 0x16, 0xc5, 0x1a, 0x9e, 0xce, 0xcb, 0x60, 0x32, 0xf4, 0x3b, 0xbe, 0x87, 0xf3, + 0x48, 0x6f, 0xe6, 0x0b, 0x96, 0x89, 0xd5, 0x86, 0x5b, 0x85, 0xda, 0xc8, 0x30, 0xc7, 0x46, 0x96, + 0x65, 0x62, 0xb5, 0x91, 0x60, 0x54, 0x1b, 0x04, 0x8c, 0xa9, 0xcd, 0x45, 0xcb, 0xc4, 0x6a, 0xc3, + 0xcd, 0xb1, 0xda, 0x28, 0x40, 0xc7, 0x46, 0x4b, 0x96, 0x89, 0xd5, 0x46, 0x06, 0x6a, 0x8c, 0x4c, + 0x6d, 0x96, 0x2d, 0x13, 0xab, 0x0d, 0x37, 0xef, 0x27, 0x19, 0x99, 0xda, 0x5c, 0xb2, 0x4c, 0xac, + 0x36, 0x32, 0x90, 0xaa, 0x8d, 0x00, 0x72, 0x59, 0xb8, 0x6c, 0x99, 0x58, 0x6d, 0xb8, 0x5d, 0x52, + 0x1b, 0x15, 0xea, 0xd8, 0xe8, 0xff, 0x2c, 0x13, 0xab, 0x8d, 0x02, 0xa5, 0x6a, 0x13, 0xbb, 0xe7, + 0xb4, 0x57, 0x2c, 0x13, 0xab, 0x8d, 0x08, 0x40, 0x52, 0x1b, 0x0d, 0xec, 0xd8, 0xa8, 0x64, 0x99, + 0x58, 0x6d, 0x54, 0x30, 0x55, 0x9b, 0x38, 0x08, 0xa2, 0x36, 0x57, 0x2d, 0x13, 0xab, 0x8d, 0x08, + 0x81, 0xab, 0x8d, 0x80, 0x31, 0xb5, 0xb9, 0x66, 0x99, 0x58, 0x6d, 0xb8, 0x39, 0x56, 0x1b, 0x01, + 0x24, 0x6a, 0x73, 0xdd, 0x32, 0xb1, 0xda, 0x70, 0x23, 0x57, 0x9b, 0x38, 0x42, 0xaa, 0x36, 0x6f, + 0x5a, 0x26, 0x56, 0x1b, 0x11, 0x9f, 0x50, 0x9b, 0x98, 0x8d, 0xa8, 0xcd, 0x5b, 0x96, 0x89, 0xd5, + 0x46, 0xd0, 0x71, 0xb5, 0x11, 0x30, 0x4d, 0x6d, 0x56, 0x2d, 0xf3, 0xb5, 0xd4, 0x86, 0xf3, 0x24, + 0xd4, 0x26, 0xce, 0x92, 0xa6, 0x36, 0x6b, 0xc4, 0x43, 0x7f, 0xb5, 0x11, 0xc9, 0x4c, 0xa8, 0x8d, + 
0x1e, 0x37, 0x11, 0x85, 0x0d, 0xcb, 0x1c, 0x5c, 0x6d, 0xd4, 0x88, 0xb9, 0xda, 0x24, 0xc2, 0x25, + 0xc4, 0x36, 0x21, 0xee, 0xa3, 0x36, 0x5a, 0xa0, 0x5c, 0x6d, 0xb4, 0x6a, 0x31, 0xb5, 0x71, 0x70, + 0xcd, 0xa8, 0xda, 0xa8, 0x75, 0x13, 0x6a, 0x23, 0xd6, 0x11, 0xb5, 0xb9, 0xc5, 0xf0, 0x46, 0xdc, + 0x0e, 0x44, 0x6d, 0x9e, 0x82, 0xa9, 0x96, 0xd7, 0xa1, 0x02, 0xc1, 0x64, 0x62, 0x93, 0x24, 0xf5, + 0xcd, 0xec, 0x0c, 0x3c, 0xf6, 0x3a, 0x44, 0x3b, 0xc8, 0x47, 0xa5, 0xdd, 0x0d, 0x4f, 0xdc, 0x89, + 0x96, 0x6c, 0x93, 0x58, 0x1d, 0x9b, 0xa9, 0xca, 0xed, 0xc1, 0x58, 0x1d, 0x9b, 0x7c, 0x28, 0xac, + 0xcc, 0x06, 0x9f, 0x83, 0x69, 0xcc, 0x4a, 0xe5, 0x87, 0xab, 0xd0, 0x1d, 0xc2, 0xbb, 0xd2, 0x97, + 0x97, 0x4a, 0x13, 0xfd, 0xa4, 0xcc, 0x38, 0x3c, 0xd9, 0x2a, 0x73, 0x3b, 0x36, 0x17, 0xae, 0xb7, + 0x07, 0xe4, 0x76, 0x6c, 0xfa, 0xa9, 0x72, 0x73, 0x2b, 0xe7, 0xa6, 0x22, 0xc7, 0xb5, 0xee, 0xff, + 0x07, 0xe0, 0xa6, 0x02, 0xb8, 0xaf, 0xc5, 0x2d, 0x5b, 0x65, 0x6e, 0xc7, 0xe6, 0xf2, 0xf8, 0xce, + 0x80, 0xdc, 0x8e, 0xbd, 0xaf, 0xc5, 0x2d, 0x5b, 0xe1, 0xc7, 0xc1, 0x0c, 0xe6, 0x66, 0xda, 0x26, + 0x24, 0xf5, 0x2e, 0x61, 0x5f, 0xed, 0xcb, 0xce, 0x74, 0x96, 0xfd, 0xa0, 0xfc, 0x38, 0x50, 0xd5, + 0xae, 0x78, 0x70, 0x6c, 0xa1, 0xc4, 0x1f, 0x19, 0xd4, 0x83, 0x63, 0xb3, 0x1f, 0x9a, 0x07, 0x61, + 0x87, 0x47, 0x60, 0x8e, 0xe4, 0x87, 0x6f, 0x42, 0x28, 0xf8, 0x3d, 0xe2, 0x63, 0xbd, 0x7f, 0x8e, + 0x18, 0x98, 0xff, 0xa4, 0x5e, 0x70, 0xc8, 0xfa, 0x15, 0xd5, 0x0f, 0xae, 0x04, 0xdf, 0xcb, 0xd6, + 0xc0, 0x7e, 0x1c, 0x9b, 0xff, 0xd4, 0xfd, 0xc4, 0x57, 0xd4, 0xfb, 0x95, 0x1e, 0x1a, 0xe5, 0x41, + 0xef, 0x57, 0x72, 0x9c, 0x68, 0xf7, 0x2b, 0x3d, 0x62, 0x9e, 0x81, 0x62, 0xcc, 0xca, 0xce, 0x98, + 0xfb, 0x84, 0xf6, 0xad, 0xd3, 0x69, 0xe9, 0xe9, 0x43, 0x79, 0x27, 0x5b, 0x8a, 0x11, 0xee, 0x02, + 0xec, 0x89, 0x9c, 0x46, 0xf4, 0x48, 0x7a, 0x40, 0x58, 0xaf, 0xf5, 0x65, 0xc5, 0xe7, 0x14, 0xfe, + 0x9f, 0x52, 0xe6, 0x5b, 0xb1, 0x45, 0xb4, 0x3b, 0x95, 0x42, 0x76, 0x7e, 0x55, 0x06, 0x69, 0x77, + 0x02, 0xa5, 0x9f, 0x52, 0xbb, 0x4b, 0x56, 0x9e, 0x04, 0xc6, 0x4d, 0x8f, 0xbc, 0xea, 0x00, 0x49, + 0xa0, 0xcb, 0xc9, 0x69, 0x18, 0x27, 0x41, 0x32, 0xc2, 0x0e, 0x38, 0x2b, 0x11, 0x6b, 0x87, 0xe4, + 0x43, 0xe2, 0xe1, 0xe6, 0x00, 0x1e, 0x94, 0x63, 0x91, 0x7a, 0x9a, 0x6f, 0xa5, 0x5e, 0x84, 0x11, + 0x58, 0x94, 0x3c, 0xea, 0xa7, 0xe6, 0x36, 0x71, 0xe9, 0x0c, 0xe0, 0x52, 0x3d, 0x33, 0xa9, 0xcf, + 0x85, 0x56, 0xfa, 0x55, 0x78, 0x0c, 0xe6, 0x93, 0xdb, 0x24, 0x47, 0xdf, 0xce, 0x20, 0xf7, 0x80, + 0xb4, 0x0d, 0x7c, 0xf4, 0x49, 0xf7, 0x80, 0x76, 0x05, 0xbe, 0x0f, 0x16, 0x52, 0x76, 0x47, 0x3c, + 0x3d, 0x22, 0x9e, 0x36, 0x06, 0xdf, 0x5a, 0xec, 0x6a, 0xb6, 0x95, 0x72, 0x09, 0x2e, 0x83, 0x42, + 0xd0, 0xf6, 0x83, 0x23, 0x7e, 0xdc, 0x04, 0xf8, 0x11, 0x7b, 0x7b, 0xc8, 0xcd, 0x13, 0x2b, 0x3b, + 0x3c, 0x3e, 0x06, 0x66, 0x29, 0x48, 0xab, 0x6d, 0xe7, 0xb5, 0x1e, 0xb7, 0xb6, 0x87, 0x5c, 0x48, + 0x68, 0xd4, 0x5a, 0x8a, 0x08, 0x58, 0xb7, 0x7f, 0xc0, 0x27, 0x12, 0xc4, 0xca, 0x7a, 0xf7, 0x22, + 0xa0, 0x5f, 0x59, 0xdb, 0x86, 0x6c, 0xbc, 0x01, 0x88, 0x91, 0x76, 0xe1, 0x05, 0x00, 0x18, 0x04, + 0xdf, 0x87, 0x11, 0x7e, 0x10, 0xdd, 0x1e, 0x72, 0xc7, 0x29, 0x02, 0xdf, 0x5b, 0xca, 0x56, 0x1d, + 0x1b, 0x75, 0x2d, 0xa3, 0x34, 0xac, 0x6c, 0xd5, 0xb1, 0x63, 0x47, 0x54, 0x7b, 0x7a, 0xf8, 0xf1, + 0x58, 0x38, 0xa2, 0x62, 0x22, 0x78, 0x98, 0x90, 0xbc, 0xc2, 0x8f, 0xc6, 0x82, 0x87, 0x09, 0x43, + 0x85, 0x47, 0x43, 0xca, 0xf6, 0xe1, 0xe0, 0x8f, 0x78, 0x22, 0x66, 0x52, 0x9e, 0x3d, 0xe9, 0x69, + 0x8c, 0x88, 0x0c, 0x9b, 0xa6, 0xa1, 0x5f, 0x19, 0x24, 0xf7, 0x8b, 0x2b, 0x74, 0xdc, 0xb6, 0xc2, + 0xe7, 0x3c, 0x2b, 0x78, 
0xab, 0xef, 0x79, 0xcd, 0x9e, 0x1f, 0x3f, 0xa6, 0x61, 0xd3, 0x33, 0xba, + 0x0e, 0xba, 0x60, 0x5e, 0x9d, 0xd1, 0x08, 0xc6, 0x5f, 0x1b, 0xec, 0xd1, 0x56, 0x67, 0x24, 0x7a, + 0x47, 0x29, 0x67, 0x95, 0x49, 0x4e, 0x06, 0xa7, 0x63, 0x0b, 0xce, 0xdf, 0xf4, 0xe1, 0x74, 0xec, + 0x24, 0xa7, 0x63, 0x73, 0xce, 0x03, 0xe9, 0x21, 0xbf, 0xa7, 0x06, 0xfa, 0x5b, 0x4a, 0x7a, 0x3e, + 0x41, 0x7a, 0x20, 0x45, 0x3a, 0xa7, 0x0e, 0x89, 0xb2, 0x68, 0xa5, 0x58, 0x7f, 0xd7, 0x8f, 0x96, + 0x07, 0x3b, 0xa7, 0x8e, 0x94, 0xd2, 0x32, 0x40, 0x1a, 0x47, 0xb0, 0xfe, 0x3e, 0x2b, 0x03, 0xa4, + 0x97, 0xb4, 0x0c, 0x10, 0x5b, 0x5a, 0xa8, 0xb4, 0xd3, 0x04, 0xe9, 0x1f, 0xb2, 0x42, 0xa5, 0xcd, + 0xa7, 0x85, 0x4a, 0x8d, 0x69, 0xb4, 0x4c, 0x61, 0x38, 0xed, 0x1f, 0xb3, 0x68, 0xe9, 0x4d, 0xa8, + 0xd1, 0x52, 0x63, 0x5a, 0x06, 0xc8, 0x3d, 0x2a, 0x58, 0xff, 0x94, 0x95, 0x01, 0x72, 0xdb, 0x6a, + 0x19, 0x20, 0x36, 0xce, 0xb9, 0x27, 0x3d, 0x1c, 0x28, 0xcd, 0xff, 0x67, 0x83, 0xc8, 0x60, 0xdf, + 0xe6, 0x97, 0x1f, 0x0a, 0xa5, 0x20, 0xd5, 0x91, 0x81, 0x60, 0xfc, 0x8b, 0xc1, 0x9e, 0xb4, 0xfa, + 0x35, 0xbf, 0x32, 0x58, 0xc8, 0xe0, 0x94, 0x1a, 0xea, 0xaf, 0x7d, 0x38, 0x45, 0xf3, 0x2b, 0x53, + 0x08, 0xa9, 0x46, 0xda, 0x30, 0x42, 0x90, 0xfe, 0x8d, 0x92, 0x9e, 0xd2, 0xfc, 0xea, 0xcc, 0x22, + 0x8b, 0x56, 0x8a, 0xf5, 0xef, 0xfd, 0x68, 0x45, 0xf3, 0xab, 0x13, 0x8e, 0xb4, 0x0c, 0xa8, 0xcd, + 0xff, 0x8f, 0xac, 0x0c, 0xc8, 0xcd, 0xaf, 0x0c, 0x03, 0xd2, 0x42, 0xd5, 0x9a, 0xff, 0x9f, 0x59, + 0xa1, 0x2a, 0xcd, 0xaf, 0x8e, 0x0e, 0xd2, 0x68, 0xb5, 0xe6, 0xff, 0x57, 0x16, 0xad, 0xd2, 0xfc, + 0xea, 0xb3, 0x68, 0x5a, 0x06, 0xd4, 0xe6, 0xff, 0x77, 0x56, 0x06, 0xe4, 0xe6, 0x57, 0x06, 0x0e, + 0x9c, 0xf3, 0xa1, 0x34, 0xd7, 0xe5, 0xef, 0x70, 0xd0, 0x77, 0x73, 0x6c, 0x4e, 0x96, 0xd8, 0x3b, + 0x43, 0xc4, 0x33, 0x5f, 0x6e, 0x81, 0x8f, 0x80, 0x18, 0x1a, 0xd6, 0xc4, 0xcb, 0x1a, 0xf4, 0xbd, + 0x5c, 0xc6, 0xf9, 0xf1, 0x94, 0x43, 0x5c, 0xe1, 0x5f, 0x98, 0xe0, 0x47, 0xc1, 0x8c, 0x34, 0xc4, + 0xe6, 0x2f, 0x8e, 0xd0, 0xf7, 0xb3, 0xc8, 0xaa, 0x18, 0xf3, 0xd8, 0x8b, 0x5e, 0xc6, 0x64, 0xc2, + 0x04, 0xb7, 0xd4, 0xb9, 0x70, 0xaf, 0xde, 0x45, 0x3f, 0xa0, 0x44, 0x0b, 0x69, 0x45, 0xe8, 0xd5, + 0xbb, 0xca, 0xc4, 0xb8, 0x57, 0xef, 0xc2, 0x4d, 0x20, 0x66, 0x8b, 0x35, 0xaf, 0x7d, 0x82, 0x7e, + 0x48, 0xd7, 0xcf, 0x26, 0xd6, 0x6f, 0xb5, 0x4f, 0xdc, 0x3c, 0x87, 0x6e, 0xb5, 0x4f, 0xe0, 0x5d, + 0x69, 0xd6, 0xfc, 0x0a, 0x97, 0x01, 0xfd, 0x88, 0xae, 0x9d, 0x4f, 0xac, 0xa5, 0x55, 0x12, 0xd3, + 0x4d, 0xf2, 0x15, 0x97, 0x27, 0x6e, 0x50, 0x5e, 0x9e, 0x1f, 0xe7, 0x48, 0xb5, 0xfb, 0x95, 0x47, + 0xf4, 0xa5, 0x54, 0x1e, 0x41, 0x14, 0x97, 0xe7, 0x27, 0xb9, 0x0c, 0x85, 0x93, 0xca, 0xc3, 0x97, + 0xc5, 0xe5, 0x91, 0xb9, 0x48, 0x79, 0x48, 0x75, 0x7e, 0x9a, 0xc5, 0x25, 0x55, 0x27, 0x1e, 0x0a, + 0xb2, 0x55, 0xb8, 0x3a, 0xf2, 0xad, 0x82, 0xab, 0xf3, 0x4b, 0x4a, 0x94, 0x5d, 0x1d, 0xe9, 0xee, + 0x60, 0xd5, 0x11, 0x14, 0xb8, 0x3a, 0x3f, 0xa3, 0xeb, 0x33, 0xaa, 0xc3, 0xa1, 0xac, 0x3a, 0x62, + 0x25, 0xad, 0xce, 0xcf, 0xe9, 0xda, 0xcc, 0xea, 0x70, 0x38, 0xad, 0xce, 0x05, 0x00, 0xc8, 0xfe, + 0xdb, 0x5e, 0xcb, 0x5f, 0x43, 0x9f, 0x36, 0xc9, 0x6b, 0x28, 0xc9, 0x04, 0x2d, 0x90, 0xa7, 0xfd, + 0x8b, 0xbf, 0xae, 0xa3, 0xcf, 0xc8, 0x88, 0x5d, 0x6c, 0x82, 0x17, 0x41, 0xa1, 0x16, 0x43, 0x36, + 0xd0, 0x67, 0x19, 0xa4, 0xca, 0x21, 0x1b, 0x70, 0x09, 0x4c, 0x50, 0x04, 0x81, 0xd8, 0x35, 0xf4, + 0x39, 0x9d, 0x86, 0xfc, 0x3d, 0x49, 0xbe, 0xad, 0x62, 0xc8, 0x4d, 0xf4, 0x79, 0x8a, 0x90, 0x6d, + 0x70, 0x99, 0xd3, 0xac, 0x12, 0x1e, 0x07, 0x7d, 0x41, 0x01, 0x61, 0x1e, 0x47, 0xec, 0x08, 0x7f, + 0xbb, 0x85, 0xbe, 0xa8, 0x3b, 0xba, 0x85, 0x01, 
0x22, 0xb4, 0x4d, 0xf4, 0x25, 0x3d, 0xda, 0xcd, + 0x78, 0xcb, 0xf8, 0xeb, 0x6d, 0xf4, 0x65, 0x9d, 0xe2, 0x36, 0x5c, 0x02, 0x85, 0xaa, 0x40, 0xac, + 0xad, 0xa2, 0xaf, 0xb0, 0x38, 0x04, 0xc9, 0xda, 0x2a, 0xc1, 0xec, 0x54, 0xde, 0x7d, 0x50, 0xdb, + 0xdd, 0x7a, 0x5c, 0x59, 0x5b, 0x43, 0x5f, 0xe5, 0x18, 0x6c, 0xa4, 0xb6, 0x18, 0x43, 0x72, 0xbd, + 0x8e, 0xbe, 0xa6, 0x60, 0x88, 0x0d, 0x5e, 0x02, 0x93, 0x35, 0x29, 0xbf, 0x6b, 0x1b, 0xe8, 0xeb, + 0x09, 0x6f, 0x1b, 0x14, 0x55, 0x8d, 0x51, 0x36, 0xfa, 0x46, 0x02, 0x65, 0xc7, 0x09, 0xa4, 0xa0, + 0x9b, 0xe8, 0x9b, 0x72, 0x02, 0x09, 0x48, 0xca, 0x32, 0xdd, 0x9d, 0x83, 0xbe, 0x95, 0x00, 0x39, + 0xd8, 0x9f, 0x14, 0xd3, 0xad, 0x5a, 0x0d, 0x7d, 0x3b, 0x81, 0xba, 0x85, 0x51, 0x52, 0x4c, 0x9b, + 0xb5, 0x1a, 0xfa, 0x4e, 0x22, 0xaa, 0xcd, 0xc5, 0xe7, 0x60, 0x42, 0x7d, 0xd0, 0x29, 0x00, 0xc3, + 0x63, 0x6f, 0x44, 0x0d, 0x0f, 0xbe, 0x0d, 0xf2, 0xf5, 0x40, 0xbc, 0xd4, 0x40, 0xb9, 0xd3, 0x5e, + 0x80, 0xc8, 0xe8, 0xc5, 0x7b, 0x00, 0x26, 0x87, 0x94, 0xb0, 0x08, 0xcc, 0x97, 0xfe, 0x09, 0x73, + 0x81, 0x7f, 0x85, 0xb3, 0xe0, 0x0c, 0xbd, 0x7d, 0x72, 0xc4, 0x46, 0xbf, 0xdc, 0xc9, 0x6d, 0x1a, + 0x31, 0x83, 0x3c, 0x90, 0x94, 0x19, 0xcc, 0x14, 0x06, 0x53, 0x66, 0x28, 0x83, 0xd9, 0xb4, 0xd1, + 0xa3, 0xcc, 0x31, 0x91, 0xc2, 0x31, 0x91, 0xce, 0xa1, 0x8c, 0x18, 0x65, 0x8e, 0xe1, 0x14, 0x8e, + 0xe1, 0x24, 0x47, 0x62, 0x94, 0x28, 0x73, 0x4c, 0xa7, 0x70, 0x4c, 0xa7, 0x73, 0x28, 0x23, 0x43, + 0x99, 0x03, 0xa6, 0x70, 0x40, 0x99, 0xe3, 0x01, 0x98, 0x4f, 0x1f, 0x0c, 0xca, 0x2c, 0xa3, 0x29, + 0x2c, 0xa3, 0x19, 0x2c, 0xea, 0xf0, 0x4f, 0x66, 0x19, 0x49, 0x61, 0x19, 0x91, 0x59, 0xaa, 0x00, + 0x65, 0x8d, 0xf7, 0x64, 0x9e, 0xa9, 0x14, 0x9e, 0xa9, 0x2c, 0x1e, 0x6d, 0x7c, 0x27, 0xf3, 0x14, + 0x53, 0x78, 0x8a, 0xa9, 0xdd, 0x26, 0x0f, 0xe9, 0x4e, 0xeb, 0xd7, 0x9c, 0xcc, 0xb0, 0x05, 0x66, + 0x52, 0xe6, 0x71, 0xa7, 0x51, 0x18, 0x32, 0xc5, 0x5d, 0x50, 0xd4, 0x87, 0x6f, 0xf2, 0xfa, 0xb1, + 0x94, 0xf5, 0x63, 0x29, 0x4d, 0xa2, 0x0f, 0xda, 0x64, 0x8e, 0xf1, 0x14, 0x8e, 0xf1, 0xe4, 0x36, + 0xf4, 0x89, 0xda, 0x69, 0x14, 0x05, 0x99, 0x22, 0x04, 0xe7, 0xfa, 0x8c, 0xcc, 0x52, 0xa8, 0xde, + 0x91, 0xa9, 0x5e, 0xe3, 0x7d, 0x95, 0xe4, 0xf3, 0x18, 0x9c, 0xef, 0x37, 0x33, 0x4b, 0x71, 0xba, + 0xa6, 0x3a, 0xed, 0xfb, 0x0a, 0x4b, 0x72, 0xd4, 0xa4, 0x0d, 0x97, 0x36, 0x2b, 0x4b, 0x71, 0x72, + 0x47, 0x76, 0x32, 0xe8, 0x4b, 0x2d, 0xc9, 0x9b, 0x07, 0xce, 0x66, 0xce, 0xcb, 0x52, 0xdc, 0xad, + 0xa8, 0xee, 0xb2, 0x5f, 0x75, 0xc5, 0x2e, 0x96, 0x6e, 0x03, 0x20, 0x4d, 0xf6, 0x46, 0x81, 0x59, + 0xdd, 0xdb, 0x2b, 0x0e, 0xe1, 0x5f, 0xca, 0x5b, 0x6e, 0xd1, 0xa0, 0xbf, 0x3c, 0x2f, 0xe6, 0xb0, + 0xbb, 0xdd, 0xca, 0xc3, 0xe2, 0x7f, 0xf9, 0x7f, 0x46, 0x79, 0x42, 0x8c, 0xa2, 0xf0, 0xa9, 0xb2, + 0xf4, 0x06, 0x98, 0xd4, 0x06, 0x92, 0x05, 0x60, 0xd4, 0xf9, 0x81, 0x52, 0xbf, 0x76, 0x13, 0x80, + 0xf8, 0xdf, 0x30, 0xc1, 0x29, 0x90, 0x3f, 0xd8, 0xdd, 0x7f, 0x52, 0xb9, 0xbf, 0x53, 0xdd, 0xa9, + 0x3c, 0x28, 0x0e, 0xc1, 0x02, 0x18, 0x7b, 0xe2, 0xee, 0x3d, 0xdd, 0x2b, 0x1f, 0x54, 0x8b, 0x06, + 0x1c, 0x03, 0xc3, 0x8f, 0xf6, 0xf7, 0x76, 0x8b, 0xb9, 0x6b, 0xf7, 0x40, 0x5e, 0x9e, 0x07, 0x4e, + 0x81, 0x7c, 0x75, 0xcf, 0xad, 0xec, 0x3c, 0xdc, 0xad, 0xd1, 0x48, 0x25, 0x03, 0x8d, 0x58, 0x31, + 0x3c, 0x2f, 0xe6, 0xca, 0x17, 0xc1, 0x85, 0x7a, 0xd0, 0x4a, 0xfc, 0x61, 0x26, 0x25, 0xe7, 0xc5, + 0x08, 0xb1, 0x6e, 0xfc, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x33, 0xc2, 0x0c, 0xb6, 0xeb, 0x26, 0x00, + 0x00, +} diff --git a/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto 
b/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto new file mode 100644 index 000000000..95a8fd135 --- /dev/null +++ b/vendor/github.com/golang/protobuf/_conformance/conformance_proto/conformance.proto @@ -0,0 +1,285 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package conformance; +option java_package = "com.google.protobuf.conformance"; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +// This defines the conformance testing protocol. This protocol exists between +// the conformance test suite itself and the code being tested. For each test, +// the suite will send a ConformanceRequest message and expect a +// ConformanceResponse message. +// +// You can either run the tests in two different ways: +// +// 1. in-process (using the interface in conformance_test.h). +// +// 2. as a sub-process communicating over a pipe. Information about how to +// do this is in conformance_test_runner.cc. +// +// Pros/cons of the two approaches: +// +// - running as a sub-process is much simpler for languages other than C/C++. +// +// - running as a sub-process may be more tricky in unusual environments like +// iOS apps, where fork/stdin/stdout are not available. + +enum WireFormat { + UNSPECIFIED = 0; + PROTOBUF = 1; + JSON = 2; +} + +// Represents a single test case's input. The testee should: +// +// 1. parse this proto (which should always succeed) +// 2. parse the protobuf or JSON payload in "payload" (which may fail) +// 3. if the parse succeeded, serialize the message in the requested format. +message ConformanceRequest { + // The payload (whether protobuf of JSON) is always for a TestAllTypes proto + // (see below). 
+ oneof payload { + bytes protobuf_payload = 1; + string json_payload = 2; + } + + // Which format should the testee serialize its message to? + WireFormat requested_output_format = 3; +} + +// Represents a single test case's output. +message ConformanceResponse { + oneof result { + // This string should be set to indicate parsing failed. The string can + // provide more information about the parse error if it is available. + // + // Setting this string does not necessarily mean the testee failed the + // test. Some of the test cases are intentionally invalid input. + string parse_error = 1; + + // If the input was successfully parsed but errors occurred when + // serializing it to the requested output format, set the error message in + // this field. + string serialize_error = 6; + + // This should be set if some other error occurred. This will always + // indicate that the test failed. The string can provide more information + // about the failure. + string runtime_error = 2; + + // If the input was successfully parsed and the requested output was + // protobuf, serialize it to protobuf and set it in this field. + bytes protobuf_payload = 3; + + // If the input was successfully parsed and the requested output was JSON, + // serialize to JSON and set it in this field. + string json_payload = 4; + + // For when the testee skipped the test, likely because a certain feature + // wasn't supported, like JSON input/output. + string skipped = 5; + } +} + +// This proto includes every type of field in both singular and repeated +// forms. +message TestAllTypes { + message NestedMessage { + int32 a = 1; + TestAllTypes corecursive = 2; + } + + enum NestedEnum { + FOO = 0; + BAR = 1; + BAZ = 2; + NEG = -1; // Intentionally negative. + } + + // Singular + int32 optional_int32 = 1; + int64 optional_int64 = 2; + uint32 optional_uint32 = 3; + uint64 optional_uint64 = 4; + sint32 optional_sint32 = 5; + sint64 optional_sint64 = 6; + fixed32 optional_fixed32 = 7; + fixed64 optional_fixed64 = 8; + sfixed32 optional_sfixed32 = 9; + sfixed64 optional_sfixed64 = 10; + float optional_float = 11; + double optional_double = 12; + bool optional_bool = 13; + string optional_string = 14; + bytes optional_bytes = 15; + + NestedMessage optional_nested_message = 18; + ForeignMessage optional_foreign_message = 19; + + NestedEnum optional_nested_enum = 21; + ForeignEnum optional_foreign_enum = 22; + + string optional_string_piece = 24 [ctype=STRING_PIECE]; + string optional_cord = 25 [ctype=CORD]; + + TestAllTypes recursive_message = 27; + + // Repeated + repeated int32 repeated_int32 = 31; + repeated int64 repeated_int64 = 32; + repeated uint32 repeated_uint32 = 33; + repeated uint64 repeated_uint64 = 34; + repeated sint32 repeated_sint32 = 35; + repeated sint64 repeated_sint64 = 36; + repeated fixed32 repeated_fixed32 = 37; + repeated fixed64 repeated_fixed64 = 38; + repeated sfixed32 repeated_sfixed32 = 39; + repeated sfixed64 repeated_sfixed64 = 40; + repeated float repeated_float = 41; + repeated double repeated_double = 42; + repeated bool repeated_bool = 43; + repeated string repeated_string = 44; + repeated bytes repeated_bytes = 45; + + repeated NestedMessage repeated_nested_message = 48; + repeated ForeignMessage repeated_foreign_message = 49; + + repeated NestedEnum repeated_nested_enum = 51; + repeated ForeignEnum repeated_foreign_enum = 52; + + repeated string repeated_string_piece = 54 [ctype=STRING_PIECE]; + repeated string repeated_cord = 55 [ctype=CORD]; + + // Map + map < int32, int32> map_int32_int32 = 
56; + map < int64, int64> map_int64_int64 = 57; + map < uint32, uint32> map_uint32_uint32 = 58; + map < uint64, uint64> map_uint64_uint64 = 59; + map < sint32, sint32> map_sint32_sint32 = 60; + map < sint64, sint64> map_sint64_sint64 = 61; + map < fixed32, fixed32> map_fixed32_fixed32 = 62; + map < fixed64, fixed64> map_fixed64_fixed64 = 63; + map < sfixed32, sfixed32> map_sfixed32_sfixed32 = 64; + map < sfixed64, sfixed64> map_sfixed64_sfixed64 = 65; + map < int32, float> map_int32_float = 66; + map < int32, double> map_int32_double = 67; + map < bool, bool> map_bool_bool = 68; + map < string, string> map_string_string = 69; + map < string, bytes> map_string_bytes = 70; + map < string, NestedMessage> map_string_nested_message = 71; + map < string, ForeignMessage> map_string_foreign_message = 72; + map < string, NestedEnum> map_string_nested_enum = 73; + map < string, ForeignEnum> map_string_foreign_enum = 74; + + oneof oneof_field { + uint32 oneof_uint32 = 111; + NestedMessage oneof_nested_message = 112; + string oneof_string = 113; + bytes oneof_bytes = 114; + bool oneof_bool = 115; + uint64 oneof_uint64 = 116; + float oneof_float = 117; + double oneof_double = 118; + NestedEnum oneof_enum = 119; + } + + // Well-known types + google.protobuf.BoolValue optional_bool_wrapper = 201; + google.protobuf.Int32Value optional_int32_wrapper = 202; + google.protobuf.Int64Value optional_int64_wrapper = 203; + google.protobuf.UInt32Value optional_uint32_wrapper = 204; + google.protobuf.UInt64Value optional_uint64_wrapper = 205; + google.protobuf.FloatValue optional_float_wrapper = 206; + google.protobuf.DoubleValue optional_double_wrapper = 207; + google.protobuf.StringValue optional_string_wrapper = 208; + google.protobuf.BytesValue optional_bytes_wrapper = 209; + + repeated google.protobuf.BoolValue repeated_bool_wrapper = 211; + repeated google.protobuf.Int32Value repeated_int32_wrapper = 212; + repeated google.protobuf.Int64Value repeated_int64_wrapper = 213; + repeated google.protobuf.UInt32Value repeated_uint32_wrapper = 214; + repeated google.protobuf.UInt64Value repeated_uint64_wrapper = 215; + repeated google.protobuf.FloatValue repeated_float_wrapper = 216; + repeated google.protobuf.DoubleValue repeated_double_wrapper = 217; + repeated google.protobuf.StringValue repeated_string_wrapper = 218; + repeated google.protobuf.BytesValue repeated_bytes_wrapper = 219; + + google.protobuf.Duration optional_duration = 301; + google.protobuf.Timestamp optional_timestamp = 302; + google.protobuf.FieldMask optional_field_mask = 303; + google.protobuf.Struct optional_struct = 304; + google.protobuf.Any optional_any = 305; + google.protobuf.Value optional_value = 306; + + repeated google.protobuf.Duration repeated_duration = 311; + repeated google.protobuf.Timestamp repeated_timestamp = 312; + repeated google.protobuf.FieldMask repeated_fieldmask = 313; + repeated google.protobuf.Struct repeated_struct = 324; + repeated google.protobuf.Any repeated_any = 315; + repeated google.protobuf.Value repeated_value = 316; + + // Test field-name-to-JSON-name convention. + // (protobuf says names can be any valid C/C++ identifier.)
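The fields that follow exercise this mapping. As an editorial sketch only — not protoc's exact algorithm, and edge cases such as leading or doubled underscores may differ — the usual lowerCamelCase JSON name can be derived roughly like this:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// toJSONName sketches the common proto3 JSON-name derivation: each underscore
// is dropped and the character that follows it is upper-cased.
func toJSONName(orig string) string {
	var b strings.Builder
	upperNext := false
	for _, r := range orig {
		switch {
		case r == '_':
			upperNext = true
		case upperNext:
			b.WriteRune(unicode.ToUpper(r))
			upperNext = false
		default:
			b.WriteRune(r)
		}
	}
	return b.String()
}

func main() {
	fmt.Println(toJSONName("fieldname1"))  // fieldname1
	fmt.Println(toJSONName("field_name2")) // fieldName2
}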
+ int32 fieldname1 = 401; + int32 field_name2 = 402; + int32 _field_name3 = 403; + int32 field__name4_ = 404; + int32 field0name5 = 405; + int32 field_0_name6 = 406; + int32 fieldName7 = 407; + int32 FieldName8 = 408; + int32 field_Name9 = 409; + int32 Field_Name10 = 410; + int32 FIELD_NAME11 = 411; + int32 FIELD_name12 = 412; + int32 __field_name13 = 413; + int32 __Field_name14 = 414; + int32 field__name15 = 415; + int32 field__Name16 = 416; + int32 field_name17__ = 417; + int32 Field_name18__ = 418; +} + +message ForeignMessage { + int32 c = 1; +} + +enum ForeignEnum { + FOREIGN_FOO = 0; + FOREIGN_BAR = 1; + FOREIGN_BAZ = 2; +} diff --git a/vendor/github.com/golang/protobuf/descriptor/descriptor.go b/vendor/github.com/golang/protobuf/descriptor/descriptor.go new file mode 100644 index 000000000..ac7e51bfb --- /dev/null +++ b/vendor/github.com/golang/protobuf/descriptor/descriptor.go @@ -0,0 +1,93 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Package descriptor provides functions for obtaining protocol buffer +// descriptors for generated Go types. +// +// These functions cannot go in package proto because they depend on the +// generated protobuf descriptor messages, which themselves depend on proto. +package descriptor + +import ( + "bytes" + "compress/gzip" + "fmt" + "io/ioutil" + + "github.com/golang/protobuf/proto" + protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" +) + +// extractFile extracts a FileDescriptorProto from a gzip'd buffer. 
+func extractFile(gz []byte) (*protobuf.FileDescriptorProto, error) { + r, err := gzip.NewReader(bytes.NewReader(gz)) + if err != nil { + return nil, fmt.Errorf("failed to open gzip reader: %v", err) + } + defer r.Close() + + b, err := ioutil.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("failed to uncompress descriptor: %v", err) + } + + fd := new(protobuf.FileDescriptorProto) + if err := proto.Unmarshal(b, fd); err != nil { + return nil, fmt.Errorf("malformed FileDescriptorProto: %v", err) + } + + return fd, nil +} + +// Message is a proto.Message with a method to return its descriptor. +// +// Message types generated by the protocol compiler always satisfy +// the Message interface. +type Message interface { + proto.Message + Descriptor() ([]byte, []int) +} + +// ForMessage returns a FileDescriptorProto and a DescriptorProto from within it +// describing the given message. +func ForMessage(msg Message) (fd *protobuf.FileDescriptorProto, md *protobuf.DescriptorProto) { + gz, path := msg.Descriptor() + fd, err := extractFile(gz) + if err != nil { + panic(fmt.Sprintf("invalid FileDescriptorProto for %T: %v", msg, err)) + } + + md = fd.MessageType[path[0]] + for _, i := range path[1:] { + md = md.NestedType[i] + } + return fd, md +} diff --git a/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go b/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go new file mode 100644 index 000000000..27b0729cb --- /dev/null +++ b/vendor/github.com/golang/protobuf/descriptor/descriptor_test.go @@ -0,0 +1,32 @@ +package descriptor_test + +import ( + "fmt" + "testing" + + "github.com/golang/protobuf/descriptor" + tpb "github.com/golang/protobuf/proto/testdata" + protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" +) + +func TestMessage(t *testing.T) { + var msg *protobuf.DescriptorProto + fd, md := descriptor.ForMessage(msg) + if pkg, want := fd.GetPackage(), "google.protobuf"; pkg != want { + t.Errorf("descriptor.ForMessage(%T).GetPackage() = %q; want %q", msg, pkg, want) + } + if name, want := md.GetName(), "DescriptorProto"; name != want { + t.Fatalf("descriptor.ForMessage(%T).GetName() = %q; want %q", msg, name, want) + } +} + +func Example_Options() { + var msg *tpb.MyMessageSet + _, md := descriptor.ForMessage(msg) + if md.GetOptions().GetMessageSetWireFormat() { + fmt.Printf("%v uses option message_set_wire_format.\n", md.GetName()) + } + + // Output: + // MyMessageSet uses option message_set_wire_format. +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go new file mode 100644 index 000000000..dfdfc5b30 --- /dev/null +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb.go @@ -0,0 +1,1082 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package jsonpb provides marshaling and unmarshaling between protocol buffers and JSON. +It follows the specification at https://developers.google.com/protocol-buffers/docs/proto3#json. + +This package produces a different output than the standard "encoding/json" package, +which does not operate correctly on protocol buffers. +*/ +package jsonpb + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "math" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/golang/protobuf/proto" + + stpb "github.com/golang/protobuf/ptypes/struct" +) + +// Marshaler is a configurable object for converting between +// protocol buffer objects and a JSON representation for them. +type Marshaler struct { + // Whether to render enum values as integers, as opposed to string values. + EnumsAsInts bool + + // Whether to render fields with zero values. + EmitDefaults bool + + // A string to indent each level by. The presence of this field will + // also cause a space to appear between the field separator and + // value, and for newlines to be appear between fields and array + // elements. + Indent string + + // Whether to use the original (.proto) name for fields. + OrigName bool + + // A custom URL resolver to use when marshaling Any messages to JSON. + // If unset, the default resolution strategy is to extract the + // fully-qualified type name from the type URL and pass that to + // proto.MessageType(string). + AnyResolver AnyResolver +} + +// AnyResolver takes a type URL, present in an Any message, and resolves it into +// an instance of the associated message. +type AnyResolver interface { + Resolve(typeUrl string) (proto.Message, error) +} + +func defaultResolveAny(typeUrl string) (proto.Message, error) { + // Only the part of typeUrl after the last slash is relevant. + mname := typeUrl + if slash := strings.LastIndex(mname, "/"); slash >= 0 { + mname = mname[slash+1:] + } + mt := proto.MessageType(mname) + if mt == nil { + return nil, fmt.Errorf("unknown message type %q", mname) + } + return reflect.New(mt.Elem()).Interface().(proto.Message), nil +} + +// JSONPBMarshaler is implemented by protobuf messages that customize the +// way they are marshaled to JSON. Messages that implement this should +// also implement JSONPBUnmarshaler so that the custom format can be +// parsed. +type JSONPBMarshaler interface { + MarshalJSONPB(*Marshaler) ([]byte, error) +} + +// JSONPBUnmarshaler is implemented by protobuf messages that customize +// the way they are unmarshaled from JSON. 
Messages that implement this +// should also implement JSONPBMarshaler so that the custom format can be +// produced. +type JSONPBUnmarshaler interface { + UnmarshalJSONPB(*Unmarshaler, []byte) error +} + +// Marshal marshals a protocol buffer into JSON. +func (m *Marshaler) Marshal(out io.Writer, pb proto.Message) error { + writer := &errWriter{writer: out} + return m.marshalObject(writer, pb, "", "") +} + +// MarshalToString converts a protocol buffer object to JSON string. +func (m *Marshaler) MarshalToString(pb proto.Message) (string, error) { + var buf bytes.Buffer + if err := m.Marshal(&buf, pb); err != nil { + return "", err + } + return buf.String(), nil +} + +type int32Slice []int32 + +var nonFinite = map[string]float64{ + `"NaN"`: math.NaN(), + `"Infinity"`: math.Inf(1), + `"-Infinity"`: math.Inf(-1), +} + +// For sorting extensions ids to ensure stable output. +func (s int32Slice) Len() int { return len(s) } +func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] } +func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +type wkt interface { + XXX_WellKnownType() string +} + +// marshalObject writes a struct to the Writer. +func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeURL string) error { + if jsm, ok := v.(JSONPBMarshaler); ok { + b, err := jsm.MarshalJSONPB(m) + if err != nil { + return err + } + if typeURL != "" { + // we are marshaling this object to an Any type + var js map[string]*json.RawMessage + if err = json.Unmarshal(b, &js); err != nil { + return fmt.Errorf("type %T produced invalid JSON: %v", v, err) + } + turl, err := json.Marshal(typeURL) + if err != nil { + return fmt.Errorf("failed to marshal type URL %q to JSON: %v", typeURL, err) + } + js["@type"] = (*json.RawMessage)(&turl) + if b, err = json.Marshal(js); err != nil { + return err + } + } + + out.write(string(b)) + return out.err + } + + s := reflect.ValueOf(v).Elem() + + // Handle well-known types. + if wkt, ok := v.(wkt); ok { + switch wkt.XXX_WellKnownType() { + case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value", + "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue": + // "Wrappers use the same representation in JSON + // as the wrapped primitive type, ..." + sprop := proto.GetProperties(s.Type()) + return m.marshalValue(out, sprop.Prop[0], s.Field(0), indent) + case "Any": + // Any is a bit more involved. + return m.marshalAny(out, v, indent) + case "Duration": + // "Generated output always contains 3, 6, or 9 fractional digits, + // depending on required precision." + s, ns := s.Field(0).Int(), s.Field(1).Int() + x := fmt.Sprintf("%d.%09d", s, ns) + x = strings.TrimSuffix(x, "000") + x = strings.TrimSuffix(x, "000") + out.write(`"`) + out.write(x) + out.write(`s"`) + return out.err + case "Struct", "ListValue": + // Let marshalValue handle the `Struct.fields` map or the `ListValue.values` slice. + // TODO: pass the correct Properties if needed. + return m.marshalValue(out, &proto.Properties{}, s.Field(0), indent) + case "Timestamp": + // "RFC 3339, where generated output will always be Z-normalized + // and uses 3, 6 or 9 fractional digits." + s, ns := s.Field(0).Int(), s.Field(1).Int() + t := time.Unix(s, ns).UTC() + // time.RFC3339Nano isn't exactly right (we need to get 3/6/9 fractional digits). + x := t.Format("2006-01-02T15:04:05.000000000") + x = strings.TrimSuffix(x, "000") + x = strings.TrimSuffix(x, "000") + out.write(`"`) + out.write(x) + out.write(`Z"`) + return out.err + case "Value": + // Value has a single oneof. 
+ kind := s.Field(0) + if kind.IsNil() { + // "absence of any variant indicates an error" + return errors.New("nil Value") + } + // oneof -> *T -> T -> T.F + x := kind.Elem().Elem().Field(0) + // TODO: pass the correct Properties if needed. + return m.marshalValue(out, &proto.Properties{}, x, indent) + } + } + + out.write("{") + if m.Indent != "" { + out.write("\n") + } + + firstField := true + + if typeURL != "" { + if err := m.marshalTypeURL(out, indent, typeURL); err != nil { + return err + } + firstField = false + } + + for i := 0; i < s.NumField(); i++ { + value := s.Field(i) + valueField := s.Type().Field(i) + if strings.HasPrefix(valueField.Name, "XXX_") { + continue + } + + // IsNil will panic on most value kinds. + switch value.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface: + if value.IsNil() { + continue + } + } + + if !m.EmitDefaults { + switch value.Kind() { + case reflect.Bool: + if !value.Bool() { + continue + } + case reflect.Int32, reflect.Int64: + if value.Int() == 0 { + continue + } + case reflect.Uint32, reflect.Uint64: + if value.Uint() == 0 { + continue + } + case reflect.Float32, reflect.Float64: + if value.Float() == 0 { + continue + } + case reflect.String: + if value.Len() == 0 { + continue + } + case reflect.Map, reflect.Ptr, reflect.Slice: + if value.IsNil() { + continue + } + } + } + + // Oneof fields need special handling. + if valueField.Tag.Get("protobuf_oneof") != "" { + // value is an interface containing &T{real_value}. + sv := value.Elem().Elem() // interface -> *T -> T + value = sv.Field(0) + valueField = sv.Type().Field(0) + } + prop := jsonProperties(valueField, m.OrigName) + if !firstField { + m.writeSep(out) + } + if err := m.marshalField(out, prop, value, indent); err != nil { + return err + } + firstField = false + } + + // Handle proto2 extensions. + if ep, ok := v.(proto.Message); ok { + extensions := proto.RegisteredExtensions(v) + // Sort extensions for stable output. + ids := make([]int32, 0, len(extensions)) + for id, desc := range extensions { + if !proto.HasExtension(ep, desc) { + continue + } + ids = append(ids, id) + } + sort.Sort(int32Slice(ids)) + for _, id := range ids { + desc := extensions[id] + if desc == nil { + // unknown extension + continue + } + ext, extErr := proto.GetExtension(ep, desc) + if extErr != nil { + return extErr + } + value := reflect.ValueOf(ext) + var prop proto.Properties + prop.Parse(desc.Tag) + prop.JSONName = fmt.Sprintf("[%s]", desc.Name) + if !firstField { + m.writeSep(out) + } + if err := m.marshalField(out, &prop, value, indent); err != nil { + return err + } + firstField = false + } + + } + + if m.Indent != "" { + out.write("\n") + out.write(indent) + } + out.write("}") + return out.err +} + +func (m *Marshaler) writeSep(out *errWriter) { + if m.Indent != "" { + out.write(",\n") + } else { + out.write(",") + } +} + +func (m *Marshaler) marshalAny(out *errWriter, any proto.Message, indent string) error { + // "If the Any contains a value that has a special JSON mapping, + // it will be converted as follows: {"@type": xxx, "value": yyy}. + // Otherwise, the value will be converted into a JSON object, + // and the "@type" field will be inserted to indicate the actual data type." 
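// Illustrative examples (editorial addition, not part of the vendored source),
// matching the Any test data added later in this change:
//   well-known type:   {"@type":"type.googleapis.com/google.protobuf.Duration","value":"1.212s"}
//   ordinary message:  {"@type":"something.example.com/jsonpb.Simple","oBool":true}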
+ v := reflect.ValueOf(any).Elem() + turl := v.Field(0).String() + val := v.Field(1).Bytes() + + var msg proto.Message + var err error + if m.AnyResolver != nil { + msg, err = m.AnyResolver.Resolve(turl) + } else { + msg, err = defaultResolveAny(turl) + } + if err != nil { + return err + } + + if err := proto.Unmarshal(val, msg); err != nil { + return err + } + + if _, ok := msg.(wkt); ok { + out.write("{") + if m.Indent != "" { + out.write("\n") + } + if err := m.marshalTypeURL(out, indent, turl); err != nil { + return err + } + m.writeSep(out) + if m.Indent != "" { + out.write(indent) + out.write(m.Indent) + out.write(`"value": `) + } else { + out.write(`"value":`) + } + if err := m.marshalObject(out, msg, indent+m.Indent, ""); err != nil { + return err + } + if m.Indent != "" { + out.write("\n") + out.write(indent) + } + out.write("}") + return out.err + } + + return m.marshalObject(out, msg, indent, turl) +} + +func (m *Marshaler) marshalTypeURL(out *errWriter, indent, typeURL string) error { + if m.Indent != "" { + out.write(indent) + out.write(m.Indent) + } + out.write(`"@type":`) + if m.Indent != "" { + out.write(" ") + } + b, err := json.Marshal(typeURL) + if err != nil { + return err + } + out.write(string(b)) + return out.err +} + +// marshalField writes field description and value to the Writer. +func (m *Marshaler) marshalField(out *errWriter, prop *proto.Properties, v reflect.Value, indent string) error { + if m.Indent != "" { + out.write(indent) + out.write(m.Indent) + } + out.write(`"`) + out.write(prop.JSONName) + out.write(`":`) + if m.Indent != "" { + out.write(" ") + } + if err := m.marshalValue(out, prop, v, indent); err != nil { + return err + } + return nil +} + +// marshalValue writes the value to the Writer. +func (m *Marshaler) marshalValue(out *errWriter, prop *proto.Properties, v reflect.Value, indent string) error { + var err error + v = reflect.Indirect(v) + + // Handle nil pointer + if v.Kind() == reflect.Invalid { + out.write("null") + return out.err + } + + // Handle repeated elements. + if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 { + out.write("[") + comma := "" + for i := 0; i < v.Len(); i++ { + sliceVal := v.Index(i) + out.write(comma) + if m.Indent != "" { + out.write("\n") + out.write(indent) + out.write(m.Indent) + out.write(m.Indent) + } + if err := m.marshalValue(out, prop, sliceVal, indent+m.Indent); err != nil { + return err + } + comma = "," + } + if m.Indent != "" { + out.write("\n") + out.write(indent) + out.write(m.Indent) + } + out.write("]") + return out.err + } + + // Handle well-known types. + // Most are handled up in marshalObject (because 99% are messages). + if wkt, ok := v.Interface().(wkt); ok { + switch wkt.XXX_WellKnownType() { + case "NullValue": + out.write("null") + return out.err + } + } + + // Handle enumerations. + if !m.EnumsAsInts && prop.Enum != "" { + // Unknown enum values will are stringified by the proto library as their + // value. Such values should _not_ be quoted or they will be interpreted + // as an enum string instead of their value. + enumStr := v.Interface().(fmt.Stringer).String() + var valStr string + if v.Kind() == reflect.Ptr { + valStr = strconv.Itoa(int(v.Elem().Int())) + } else { + valStr = strconv.Itoa(int(v.Int())) + } + isKnownEnum := enumStr != valStr + if isKnownEnum { + out.write(`"`) + } + out.write(enumStr) + if isKnownEnum { + out.write(`"`) + } + return out.err + } + + // Handle nested messages. 
+ if v.Kind() == reflect.Struct { + return m.marshalObject(out, v.Addr().Interface().(proto.Message), indent+m.Indent, "") + } + + // Handle maps. + // Since Go randomizes map iteration, we sort keys for stable output. + if v.Kind() == reflect.Map { + out.write(`{`) + keys := v.MapKeys() + sort.Sort(mapKeys(keys)) + for i, k := range keys { + if i > 0 { + out.write(`,`) + } + if m.Indent != "" { + out.write("\n") + out.write(indent) + out.write(m.Indent) + out.write(m.Indent) + } + + b, err := json.Marshal(k.Interface()) + if err != nil { + return err + } + s := string(b) + + // If the JSON is not a string value, encode it again to make it one. + if !strings.HasPrefix(s, `"`) { + b, err := json.Marshal(s) + if err != nil { + return err + } + s = string(b) + } + + out.write(s) + out.write(`:`) + if m.Indent != "" { + out.write(` `) + } + + if err := m.marshalValue(out, prop, v.MapIndex(k), indent+m.Indent); err != nil { + return err + } + } + if m.Indent != "" { + out.write("\n") + out.write(indent) + out.write(m.Indent) + } + out.write(`}`) + return out.err + } + + // Handle non-finite floats, e.g. NaN, Infinity and -Infinity. + if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 { + f := v.Float() + var sval string + switch { + case math.IsInf(f, 1): + sval = `"Infinity"` + case math.IsInf(f, -1): + sval = `"-Infinity"` + case math.IsNaN(f): + sval = `"NaN"` + } + if sval != "" { + out.write(sval) + return out.err + } + } + + // Default handling defers to the encoding/json library. + b, err := json.Marshal(v.Interface()) + if err != nil { + return err + } + needToQuote := string(b[0]) != `"` && (v.Kind() == reflect.Int64 || v.Kind() == reflect.Uint64) + if needToQuote { + out.write(`"`) + } + out.write(string(b)) + if needToQuote { + out.write(`"`) + } + return out.err +} + +// Unmarshaler is a configurable object for converting from a JSON +// representation to a protocol buffer object. +type Unmarshaler struct { + // Whether to allow messages to contain unknown fields, as opposed to + // failing to unmarshal. + AllowUnknownFields bool + + // A custom URL resolver to use when unmarshaling Any messages from JSON. + // If unset, the default resolution strategy is to extract the + // fully-qualified type name from the type URL and pass that to + // proto.MessageType(string). + AnyResolver AnyResolver +} + +// UnmarshalNext unmarshals the next protocol buffer from a JSON object stream. +// This function is lenient and will decode any options permutations of the +// related Marshaler. +func (u *Unmarshaler) UnmarshalNext(dec *json.Decoder, pb proto.Message) error { + inputValue := json.RawMessage{} + if err := dec.Decode(&inputValue); err != nil { + return err + } + return u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil) +} + +// Unmarshal unmarshals a JSON object stream into a protocol +// buffer. This function is lenient and will decode any options +// permutations of the related Marshaler. +func (u *Unmarshaler) Unmarshal(r io.Reader, pb proto.Message) error { + dec := json.NewDecoder(r) + return u.UnmarshalNext(dec, pb) +} + +// UnmarshalNext unmarshals the next protocol buffer from a JSON object stream. +// This function is lenient and will decode any options permutations of the +// related Marshaler. +func UnmarshalNext(dec *json.Decoder, pb proto.Message) error { + return new(Unmarshaler).UnmarshalNext(dec, pb) +} + +// Unmarshal unmarshals a JSON object stream into a protocol +// buffer. 
This function is lenient and will decode any options +// permutations of the related Marshaler. +func Unmarshal(r io.Reader, pb proto.Message) error { + return new(Unmarshaler).Unmarshal(r, pb) +} + +// UnmarshalString will populate the fields of a protocol buffer based +// on a JSON string. This function is lenient and will decode any options +// permutations of the related Marshaler. +func UnmarshalString(str string, pb proto.Message) error { + return new(Unmarshaler).Unmarshal(strings.NewReader(str), pb) +} + +// unmarshalValue converts/copies a value into the target. +// prop may be nil. +func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMessage, prop *proto.Properties) error { + targetType := target.Type() + + // Allocate memory for pointer fields. + if targetType.Kind() == reflect.Ptr { + // If input value is "null" and target is a pointer type, then the field should be treated as not set + // UNLESS the target is structpb.Value, in which case it should be set to structpb.NullValue. + _, isJSONPBUnmarshaler := target.Interface().(JSONPBUnmarshaler) + if string(inputValue) == "null" && targetType != reflect.TypeOf(&stpb.Value{}) && !isJSONPBUnmarshaler { + return nil + } + target.Set(reflect.New(targetType.Elem())) + + return u.unmarshalValue(target.Elem(), inputValue, prop) + } + + if jsu, ok := target.Addr().Interface().(JSONPBUnmarshaler); ok { + return jsu.UnmarshalJSONPB(u, []byte(inputValue)) + } + + // Handle well-known types that are not pointers. + if w, ok := target.Addr().Interface().(wkt); ok { + switch w.XXX_WellKnownType() { + case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value", + "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue": + return u.unmarshalValue(target.Field(0), inputValue, prop) + case "Any": + // Use json.RawMessage pointer type instead of value to support pre-1.8 version. 
+ // 1.8 changed RawMessage.MarshalJSON from pointer type to value type, see + // https://github.com/golang/go/issues/14493 + var jsonFields map[string]*json.RawMessage + if err := json.Unmarshal(inputValue, &jsonFields); err != nil { + return err + } + + val, ok := jsonFields["@type"] + if !ok || val == nil { + return errors.New("Any JSON doesn't have '@type'") + } + + var turl string + if err := json.Unmarshal([]byte(*val), &turl); err != nil { + return fmt.Errorf("can't unmarshal Any's '@type': %q", *val) + } + target.Field(0).SetString(turl) + + var m proto.Message + var err error + if u.AnyResolver != nil { + m, err = u.AnyResolver.Resolve(turl) + } else { + m, err = defaultResolveAny(turl) + } + if err != nil { + return err + } + + if _, ok := m.(wkt); ok { + val, ok := jsonFields["value"] + if !ok { + return errors.New("Any JSON doesn't have 'value'") + } + + if err := u.unmarshalValue(reflect.ValueOf(m).Elem(), *val, nil); err != nil { + return fmt.Errorf("can't unmarshal Any nested proto %T: %v", m, err) + } + } else { + delete(jsonFields, "@type") + nestedProto, err := json.Marshal(jsonFields) + if err != nil { + return fmt.Errorf("can't generate JSON for Any's nested proto to be unmarshaled: %v", err) + } + + if err = u.unmarshalValue(reflect.ValueOf(m).Elem(), nestedProto, nil); err != nil { + return fmt.Errorf("can't unmarshal Any nested proto %T: %v", m, err) + } + } + + b, err := proto.Marshal(m) + if err != nil { + return fmt.Errorf("can't marshal proto %T into Any.Value: %v", m, err) + } + target.Field(1).SetBytes(b) + + return nil + case "Duration": + unq, err := strconv.Unquote(string(inputValue)) + if err != nil { + return err + } + + d, err := time.ParseDuration(unq) + if err != nil { + return fmt.Errorf("bad Duration: %v", err) + } + + ns := d.Nanoseconds() + s := ns / 1e9 + ns %= 1e9 + target.Field(0).SetInt(s) + target.Field(1).SetInt(ns) + return nil + case "Timestamp": + unq, err := strconv.Unquote(string(inputValue)) + if err != nil { + return err + } + + t, err := time.Parse(time.RFC3339Nano, unq) + if err != nil { + return fmt.Errorf("bad Timestamp: %v", err) + } + + target.Field(0).SetInt(t.Unix()) + target.Field(1).SetInt(int64(t.Nanosecond())) + return nil + case "Struct": + var m map[string]json.RawMessage + if err := json.Unmarshal(inputValue, &m); err != nil { + return fmt.Errorf("bad StructValue: %v", err) + } + + target.Field(0).Set(reflect.ValueOf(map[string]*stpb.Value{})) + for k, jv := range m { + pv := &stpb.Value{} + if err := u.unmarshalValue(reflect.ValueOf(pv).Elem(), jv, prop); err != nil { + return fmt.Errorf("bad value in StructValue for key %q: %v", k, err) + } + target.Field(0).SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(pv)) + } + return nil + case "ListValue": + var s []json.RawMessage + if err := json.Unmarshal(inputValue, &s); err != nil { + return fmt.Errorf("bad ListValue: %v", err) + } + + target.Field(0).Set(reflect.ValueOf(make([]*stpb.Value, len(s), len(s)))) + for i, sv := range s { + if err := u.unmarshalValue(target.Field(0).Index(i), sv, prop); err != nil { + return err + } + } + return nil + case "Value": + ivStr := string(inputValue) + if ivStr == "null" { + target.Field(0).Set(reflect.ValueOf(&stpb.Value_NullValue{})) + } else if v, err := strconv.ParseFloat(ivStr, 0); err == nil { + target.Field(0).Set(reflect.ValueOf(&stpb.Value_NumberValue{v})) + } else if v, err := strconv.Unquote(ivStr); err == nil { + target.Field(0).Set(reflect.ValueOf(&stpb.Value_StringValue{v})) + } else if v, err := strconv.ParseBool(ivStr); 
err == nil { + target.Field(0).Set(reflect.ValueOf(&stpb.Value_BoolValue{v})) + } else if err := json.Unmarshal(inputValue, &[]json.RawMessage{}); err == nil { + lv := &stpb.ListValue{} + target.Field(0).Set(reflect.ValueOf(&stpb.Value_ListValue{lv})) + return u.unmarshalValue(reflect.ValueOf(lv).Elem(), inputValue, prop) + } else if err := json.Unmarshal(inputValue, &map[string]json.RawMessage{}); err == nil { + sv := &stpb.Struct{} + target.Field(0).Set(reflect.ValueOf(&stpb.Value_StructValue{sv})) + return u.unmarshalValue(reflect.ValueOf(sv).Elem(), inputValue, prop) + } else { + return fmt.Errorf("unrecognized type for Value %q", ivStr) + } + return nil + } + } + + // Handle enums, which have an underlying type of int32, + // and may appear as strings. + // The case of an enum appearing as a number is handled + // at the bottom of this function. + if inputValue[0] == '"' && prop != nil && prop.Enum != "" { + vmap := proto.EnumValueMap(prop.Enum) + // Don't need to do unquoting; valid enum names + // are from a limited character set. + s := inputValue[1 : len(inputValue)-1] + n, ok := vmap[string(s)] + if !ok { + return fmt.Errorf("unknown value %q for enum %s", s, prop.Enum) + } + if target.Kind() == reflect.Ptr { // proto2 + target.Set(reflect.New(targetType.Elem())) + target = target.Elem() + } + target.SetInt(int64(n)) + return nil + } + + // Handle nested messages. + if targetType.Kind() == reflect.Struct { + var jsonFields map[string]json.RawMessage + if err := json.Unmarshal(inputValue, &jsonFields); err != nil { + return err + } + + consumeField := func(prop *proto.Properties) (json.RawMessage, bool) { + // Be liberal in what names we accept; both orig_name and camelName are okay. + fieldNames := acceptedJSONFieldNames(prop) + + vOrig, okOrig := jsonFields[fieldNames.orig] + vCamel, okCamel := jsonFields[fieldNames.camel] + if !okOrig && !okCamel { + return nil, false + } + // If, for some reason, both are present in the data, favour the camelName. + var raw json.RawMessage + if okOrig { + raw = vOrig + delete(jsonFields, fieldNames.orig) + } + if okCamel { + raw = vCamel + delete(jsonFields, fieldNames.camel) + } + return raw, true + } + + sprops := proto.GetProperties(targetType) + for i := 0; i < target.NumField(); i++ { + ft := target.Type().Field(i) + if strings.HasPrefix(ft.Name, "XXX_") { + continue + } + + valueForField, ok := consumeField(sprops.Prop[i]) + if !ok { + continue + } + + if err := u.unmarshalValue(target.Field(i), valueForField, sprops.Prop[i]); err != nil { + return err + } + } + // Check for any oneof fields. + if len(jsonFields) > 0 { + for _, oop := range sprops.OneofTypes { + raw, ok := consumeField(oop.Prop) + if !ok { + continue + } + nv := reflect.New(oop.Type.Elem()) + target.Field(oop.Field).Set(nv) + if err := u.unmarshalValue(nv.Elem().Field(0), raw, oop.Prop); err != nil { + return err + } + } + } + // Handle proto2 extensions. + if len(jsonFields) > 0 { + if ep, ok := target.Addr().Interface().(proto.Message); ok { + for _, ext := range proto.RegisteredExtensions(ep) { + name := fmt.Sprintf("[%s]", ext.Name) + raw, ok := jsonFields[name] + if !ok { + continue + } + delete(jsonFields, name) + nv := reflect.New(reflect.TypeOf(ext.ExtensionType).Elem()) + if err := u.unmarshalValue(nv.Elem(), raw, nil); err != nil { + return err + } + if err := proto.SetExtension(ep, ext, nv.Interface()); err != nil { + return err + } + } + } + } + if !u.AllowUnknownFields && len(jsonFields) > 0 { + // Pick any field to be the scapegoat. 
+ var f string + for fname := range jsonFields { + f = fname + break + } + return fmt.Errorf("unknown field %q in %v", f, targetType) + } + return nil + } + + // Handle arrays (which aren't encoded bytes) + if targetType.Kind() == reflect.Slice && targetType.Elem().Kind() != reflect.Uint8 { + var slc []json.RawMessage + if err := json.Unmarshal(inputValue, &slc); err != nil { + return err + } + if slc != nil { + l := len(slc) + target.Set(reflect.MakeSlice(targetType, l, l)) + for i := 0; i < l; i++ { + if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil { + return err + } + } + } + return nil + } + + // Handle maps (whose keys are always strings) + if targetType.Kind() == reflect.Map { + var mp map[string]json.RawMessage + if err := json.Unmarshal(inputValue, &mp); err != nil { + return err + } + if mp != nil { + target.Set(reflect.MakeMap(targetType)) + var keyprop, valprop *proto.Properties + if prop != nil { + // These could still be nil if the protobuf metadata is broken somehow. + // TODO: This won't work because the fields are unexported. + // We should probably just reparse them. + //keyprop, valprop = prop.mkeyprop, prop.mvalprop + } + for ks, raw := range mp { + // Unmarshal map key. The core json library already decoded the key into a + // string, so we handle that specially. Other types were quoted post-serialization. + var k reflect.Value + if targetType.Key().Kind() == reflect.String { + k = reflect.ValueOf(ks) + } else { + k = reflect.New(targetType.Key()).Elem() + if err := u.unmarshalValue(k, json.RawMessage(ks), keyprop); err != nil { + return err + } + } + + // Unmarshal map value. + v := reflect.New(targetType.Elem()).Elem() + if err := u.unmarshalValue(v, raw, valprop); err != nil { + return err + } + target.SetMapIndex(k, v) + } + } + return nil + } + + // 64-bit integers can be encoded as strings. In this case we drop + // the quotes and proceed as normal. + isNum := targetType.Kind() == reflect.Int64 || targetType.Kind() == reflect.Uint64 + if isNum && strings.HasPrefix(string(inputValue), `"`) { + inputValue = inputValue[1 : len(inputValue)-1] + } + + // Non-finite numbers can be encoded as strings. + isFloat := targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64 + if isFloat { + if num, ok := nonFinite[string(inputValue)]; ok { + target.SetFloat(num) + return nil + } + } + + // Use the encoding/json for parsing other value types. + return json.Unmarshal(inputValue, target.Addr().Interface()) +} + +// jsonProperties returns parsed proto.Properties for the field and corrects JSONName attribute. +func jsonProperties(f reflect.StructField, origName bool) *proto.Properties { + var prop proto.Properties + prop.Init(f.Type, f.Name, f.Tag.Get("protobuf"), &f) + if origName || prop.JSONName == "" { + prop.JSONName = prop.OrigName + } + return &prop +} + +type fieldNames struct { + orig, camel string +} + +func acceptedJSONFieldNames(prop *proto.Properties) fieldNames { + opts := fieldNames{orig: prop.OrigName, camel: prop.OrigName} + if prop.JSONName != "" { + opts.camel = prop.JSONName + } + return opts +} + +// Writer wrapper inspired by https://blog.golang.org/errors-are-values +type errWriter struct { + writer io.Writer + err error +} + +func (w *errWriter) write(str string) { + if w.err != nil { + return + } + _, w.err = w.writer.Write([]byte(str)) +} + +// Map fields may have key types of non-float scalars, strings and enums. +// The easiest way to sort them in some deterministic order is to use fmt. 
+// If this turns out to be inefficient we can always consider other options, +// such as doing a Schwartzian transform. +// +// Numeric keys are sorted in numeric order per +// https://developers.google.com/protocol-buffers/docs/proto#maps. +type mapKeys []reflect.Value + +func (s mapKeys) Len() int { return len(s) } +func (s mapKeys) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s mapKeys) Less(i, j int) bool { + if k := s[i].Kind(); k == s[j].Kind() { + switch k { + case reflect.Int32, reflect.Int64: + return s[i].Int() < s[j].Int() + case reflect.Uint32, reflect.Uint64: + return s[i].Uint() < s[j].Uint() + } + } + return fmt.Sprint(s[i].Interface()) < fmt.Sprint(s[j].Interface()) +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go new file mode 100644 index 000000000..4fdbde15a --- /dev/null +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test.go @@ -0,0 +1,897 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
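Before the test file itself, a minimal usage sketch of the jsonpb API these tests exercise; this is an editorial illustration assuming the vendored packages above are importable, not part of the vendored source:

package main

import (
	"fmt"

	"github.com/golang/protobuf/jsonpb"
	pb "github.com/golang/protobuf/jsonpb/jsonpb_test_proto"
	"github.com/golang/protobuf/proto"
)

func main() {
	// Marshal a message to proto3 JSON; OrigName keeps the .proto field name
	// (o_bool) instead of the default lowerCamelCase spelling (oBool).
	m := jsonpb.Marshaler{OrigName: true}
	js, err := m.MarshalToString(&pb.Simple{OBool: proto.Bool(true)})
	if err != nil {
		panic(err)
	}
	fmt.Println(js) // {"o_bool":true}

	// Unmarshal is liberal and accepts either spelling of the field name.
	var out pb.Simple
	if err := jsonpb.UnmarshalString(`{"oBool":true}`, &out); err != nil {
		panic(err)
	}
	fmt.Println(out.GetOBool()) // true
}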
+ +package jsonpb + +import ( + "bytes" + "encoding/json" + "io" + "math" + "reflect" + "strings" + "testing" + + "github.com/golang/protobuf/proto" + + pb "github.com/golang/protobuf/jsonpb/jsonpb_test_proto" + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + "github.com/golang/protobuf/ptypes" + anypb "github.com/golang/protobuf/ptypes/any" + durpb "github.com/golang/protobuf/ptypes/duration" + stpb "github.com/golang/protobuf/ptypes/struct" + tspb "github.com/golang/protobuf/ptypes/timestamp" + wpb "github.com/golang/protobuf/ptypes/wrappers" +) + +var ( + marshaler = Marshaler{} + + marshalerAllOptions = Marshaler{ + Indent: " ", + } + + simpleObject = &pb.Simple{ + OInt32: proto.Int32(-32), + OInt64: proto.Int64(-6400000000), + OUint32: proto.Uint32(32), + OUint64: proto.Uint64(6400000000), + OSint32: proto.Int32(-13), + OSint64: proto.Int64(-2600000000), + OFloat: proto.Float32(3.14), + ODouble: proto.Float64(6.02214179e23), + OBool: proto.Bool(true), + OString: proto.String("hello \"there\""), + OBytes: []byte("beep boop"), + } + + simpleObjectJSON = `{` + + `"oBool":true,` + + `"oInt32":-32,` + + `"oInt64":"-6400000000",` + + `"oUint32":32,` + + `"oUint64":"6400000000",` + + `"oSint32":-13,` + + `"oSint64":"-2600000000",` + + `"oFloat":3.14,` + + `"oDouble":6.02214179e+23,` + + `"oString":"hello \"there\"",` + + `"oBytes":"YmVlcCBib29w"` + + `}` + + simpleObjectPrettyJSON = `{ + "oBool": true, + "oInt32": -32, + "oInt64": "-6400000000", + "oUint32": 32, + "oUint64": "6400000000", + "oSint32": -13, + "oSint64": "-2600000000", + "oFloat": 3.14, + "oDouble": 6.02214179e+23, + "oString": "hello \"there\"", + "oBytes": "YmVlcCBib29w" +}` + + repeatsObject = &pb.Repeats{ + RBool: []bool{true, false, true}, + RInt32: []int32{-3, -4, -5}, + RInt64: []int64{-123456789, -987654321}, + RUint32: []uint32{1, 2, 3}, + RUint64: []uint64{6789012345, 3456789012}, + RSint32: []int32{-1, -2, -3}, + RSint64: []int64{-6789012345, -3456789012}, + RFloat: []float32{3.14, 6.28}, + RDouble: []float64{299792458 * 1e20, 6.62606957e-34}, + RString: []string{"happy", "days"}, + RBytes: [][]byte{[]byte("skittles"), []byte("m&m's")}, + } + + repeatsObjectJSON = `{` + + `"rBool":[true,false,true],` + + `"rInt32":[-3,-4,-5],` + + `"rInt64":["-123456789","-987654321"],` + + `"rUint32":[1,2,3],` + + `"rUint64":["6789012345","3456789012"],` + + `"rSint32":[-1,-2,-3],` + + `"rSint64":["-6789012345","-3456789012"],` + + `"rFloat":[3.14,6.28],` + + `"rDouble":[2.99792458e+28,6.62606957e-34],` + + `"rString":["happy","days"],` + + `"rBytes":["c2tpdHRsZXM=","bSZtJ3M="]` + + `}` + + repeatsObjectPrettyJSON = `{ + "rBool": [ + true, + false, + true + ], + "rInt32": [ + -3, + -4, + -5 + ], + "rInt64": [ + "-123456789", + "-987654321" + ], + "rUint32": [ + 1, + 2, + 3 + ], + "rUint64": [ + "6789012345", + "3456789012" + ], + "rSint32": [ + -1, + -2, + -3 + ], + "rSint64": [ + "-6789012345", + "-3456789012" + ], + "rFloat": [ + 3.14, + 6.28 + ], + "rDouble": [ + 2.99792458e+28, + 6.62606957e-34 + ], + "rString": [ + "happy", + "days" + ], + "rBytes": [ + "c2tpdHRsZXM=", + "bSZtJ3M=" + ] +}` + + innerSimple = &pb.Simple{OInt32: proto.Int32(-32)} + innerSimple2 = &pb.Simple{OInt64: proto.Int64(25)} + innerRepeats = &pb.Repeats{RString: []string{"roses", "red"}} + innerRepeats2 = &pb.Repeats{RString: []string{"violets", "blue"}} + complexObject = &pb.Widget{ + Color: pb.Widget_GREEN.Enum(), + RColor: []pb.Widget_Color{pb.Widget_RED, pb.Widget_GREEN, pb.Widget_BLUE}, + Simple: innerSimple, + RSimple: 
[]*pb.Simple{innerSimple, innerSimple2}, + Repeats: innerRepeats, + RRepeats: []*pb.Repeats{innerRepeats, innerRepeats2}, + } + + complexObjectJSON = `{"color":"GREEN",` + + `"rColor":["RED","GREEN","BLUE"],` + + `"simple":{"oInt32":-32},` + + `"rSimple":[{"oInt32":-32},{"oInt64":"25"}],` + + `"repeats":{"rString":["roses","red"]},` + + `"rRepeats":[{"rString":["roses","red"]},{"rString":["violets","blue"]}]` + + `}` + + complexObjectPrettyJSON = `{ + "color": "GREEN", + "rColor": [ + "RED", + "GREEN", + "BLUE" + ], + "simple": { + "oInt32": -32 + }, + "rSimple": [ + { + "oInt32": -32 + }, + { + "oInt64": "25" + } + ], + "repeats": { + "rString": [ + "roses", + "red" + ] + }, + "rRepeats": [ + { + "rString": [ + "roses", + "red" + ] + }, + { + "rString": [ + "violets", + "blue" + ] + } + ] +}` + + colorPrettyJSON = `{ + "color": 2 +}` + + colorListPrettyJSON = `{ + "color": 1000, + "rColor": [ + "RED" + ] +}` + + nummyPrettyJSON = `{ + "nummy": { + "1": 2, + "3": 4 + } +}` + + objjyPrettyJSON = `{ + "objjy": { + "1": { + "dub": 1 + } + } +}` + realNumber = &pb.Real{Value: proto.Float64(3.14159265359)} + realNumberName = "Pi" + complexNumber = &pb.Complex{Imaginary: proto.Float64(0.5772156649)} + realNumberJSON = `{` + + `"value":3.14159265359,` + + `"[jsonpb.Complex.real_extension]":{"imaginary":0.5772156649},` + + `"[jsonpb.name]":"Pi"` + + `}` + + anySimple = &pb.KnownTypes{ + An: &anypb.Any{ + TypeUrl: "something.example.com/jsonpb.Simple", + Value: []byte{ + // &pb.Simple{OBool:true} + 1 << 3, 1, + }, + }, + } + anySimpleJSON = `{"an":{"@type":"something.example.com/jsonpb.Simple","oBool":true}}` + anySimplePrettyJSON = `{ + "an": { + "@type": "something.example.com/jsonpb.Simple", + "oBool": true + } +}` + + anyWellKnown = &pb.KnownTypes{ + An: &anypb.Any{ + TypeUrl: "type.googleapis.com/google.protobuf.Duration", + Value: []byte{ + // &durpb.Duration{Seconds: 1, Nanos: 212000000 } + 1 << 3, 1, // seconds + 2 << 3, 0x80, 0xba, 0x8b, 0x65, // nanos + }, + }, + } + anyWellKnownJSON = `{"an":{"@type":"type.googleapis.com/google.protobuf.Duration","value":"1.212s"}}` + anyWellKnownPrettyJSON = `{ + "an": { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +}` + + nonFinites = &pb.NonFinites{ + FNan: proto.Float32(float32(math.NaN())), + FPinf: proto.Float32(float32(math.Inf(1))), + FNinf: proto.Float32(float32(math.Inf(-1))), + DNan: proto.Float64(float64(math.NaN())), + DPinf: proto.Float64(float64(math.Inf(1))), + DNinf: proto.Float64(float64(math.Inf(-1))), + } + nonFinitesJSON = `{` + + `"fNan":"NaN",` + + `"fPinf":"Infinity",` + + `"fNinf":"-Infinity",` + + `"dNan":"NaN",` + + `"dPinf":"Infinity",` + + `"dNinf":"-Infinity"` + + `}` +) + +func init() { + if err := proto.SetExtension(realNumber, pb.E_Name, &realNumberName); err != nil { + panic(err) + } + if err := proto.SetExtension(realNumber, pb.E_Complex_RealExtension, complexNumber); err != nil { + panic(err) + } +} + +var marshalingTests = []struct { + desc string + marshaler Marshaler + pb proto.Message + json string +}{ + {"simple flat object", marshaler, simpleObject, simpleObjectJSON}, + {"simple pretty object", marshalerAllOptions, simpleObject, simpleObjectPrettyJSON}, + {"non-finite floats fields object", marshaler, nonFinites, nonFinitesJSON}, + {"repeated fields flat object", marshaler, repeatsObject, repeatsObjectJSON}, + {"repeated fields pretty object", marshalerAllOptions, repeatsObject, repeatsObjectPrettyJSON}, + {"nested message/enum flat object", marshaler, complexObject, 
complexObjectJSON}, + {"nested message/enum pretty object", marshalerAllOptions, complexObject, complexObjectPrettyJSON}, + {"enum-string flat object", Marshaler{}, + &pb.Widget{Color: pb.Widget_BLUE.Enum()}, `{"color":"BLUE"}`}, + {"enum-value pretty object", Marshaler{EnumsAsInts: true, Indent: " "}, + &pb.Widget{Color: pb.Widget_BLUE.Enum()}, colorPrettyJSON}, + {"unknown enum value object", marshalerAllOptions, + &pb.Widget{Color: pb.Widget_Color(1000).Enum(), RColor: []pb.Widget_Color{pb.Widget_RED}}, colorListPrettyJSON}, + {"repeated proto3 enum", Marshaler{}, + &proto3pb.Message{RFunny: []proto3pb.Message_Humour{ + proto3pb.Message_PUNS, + proto3pb.Message_SLAPSTICK, + }}, + `{"rFunny":["PUNS","SLAPSTICK"]}`}, + {"repeated proto3 enum as int", Marshaler{EnumsAsInts: true}, + &proto3pb.Message{RFunny: []proto3pb.Message_Humour{ + proto3pb.Message_PUNS, + proto3pb.Message_SLAPSTICK, + }}, + `{"rFunny":[1,2]}`}, + {"empty value", marshaler, &pb.Simple3{}, `{}`}, + {"empty value emitted", Marshaler{EmitDefaults: true}, &pb.Simple3{}, `{"dub":0}`}, + {"empty repeated emitted", Marshaler{EmitDefaults: true}, &pb.SimpleSlice3{}, `{"slices":[]}`}, + {"empty map emitted", Marshaler{EmitDefaults: true}, &pb.SimpleMap3{}, `{"stringy":{}}`}, + {"nested struct null", Marshaler{EmitDefaults: true}, &pb.SimpleNull3{}, `{"simple":null}`}, + {"map", marshaler, &pb.Mappy{Nummy: map[int64]int32{1: 2, 3: 4}}, `{"nummy":{"1":2,"3":4}}`}, + {"map", marshalerAllOptions, &pb.Mappy{Nummy: map[int64]int32{1: 2, 3: 4}}, nummyPrettyJSON}, + {"map", marshaler, + &pb.Mappy{Strry: map[string]string{`"one"`: "two", "three": "four"}}, + `{"strry":{"\"one\"":"two","three":"four"}}`}, + {"map", marshaler, + &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}, `{"objjy":{"1":{"dub":1}}}`}, + {"map", marshalerAllOptions, + &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}, objjyPrettyJSON}, + {"map", marshaler, &pb.Mappy{Buggy: map[int64]string{1234: "yup"}}, + `{"buggy":{"1234":"yup"}}`}, + {"map", marshaler, &pb.Mappy{Booly: map[bool]bool{false: true}}, `{"booly":{"false":true}}`}, + // TODO: This is broken. 
+ //{"map", marshaler, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}, `{"enumy":{"XIV":"ROMAN"}`}, + {"map", Marshaler{EnumsAsInts: true}, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}, `{"enumy":{"XIV":2}}`}, + {"map", marshaler, &pb.Mappy{S32Booly: map[int32]bool{1: true, 3: false, 10: true, 12: false}}, `{"s32booly":{"1":true,"3":false,"10":true,"12":false}}`}, + {"map", marshaler, &pb.Mappy{S64Booly: map[int64]bool{1: true, 3: false, 10: true, 12: false}}, `{"s64booly":{"1":true,"3":false,"10":true,"12":false}}`}, + {"map", marshaler, &pb.Mappy{U32Booly: map[uint32]bool{1: true, 3: false, 10: true, 12: false}}, `{"u32booly":{"1":true,"3":false,"10":true,"12":false}}`}, + {"map", marshaler, &pb.Mappy{U64Booly: map[uint64]bool{1: true, 3: false, 10: true, 12: false}}, `{"u64booly":{"1":true,"3":false,"10":true,"12":false}}`}, + {"proto2 map", marshaler, &pb.Maps{MInt64Str: map[int64]string{213: "cat"}}, + `{"mInt64Str":{"213":"cat"}}`}, + {"proto2 map", marshaler, + &pb.Maps{MBoolSimple: map[bool]*pb.Simple{true: {OInt32: proto.Int32(1)}}}, + `{"mBoolSimple":{"true":{"oInt32":1}}}`}, + {"oneof, not set", marshaler, &pb.MsgWithOneof{}, `{}`}, + {"oneof, set", marshaler, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Title{"Grand Poobah"}}, `{"title":"Grand Poobah"}`}, + {"force orig_name", Marshaler{OrigName: true}, &pb.Simple{OInt32: proto.Int32(4)}, + `{"o_int32":4}`}, + {"proto2 extension", marshaler, realNumber, realNumberJSON}, + {"Any with message", marshaler, anySimple, anySimpleJSON}, + {"Any with message and indent", marshalerAllOptions, anySimple, anySimplePrettyJSON}, + {"Any with WKT", marshaler, anyWellKnown, anyWellKnownJSON}, + {"Any with WKT and indent", marshalerAllOptions, anyWellKnown, anyWellKnownPrettyJSON}, + {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}, `{"dur":"3.000s"}`}, + {"Duration", marshaler, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 100000000, Nanos: 1}}, `{"dur":"100000000.000000001s"}`}, + {"Struct", marshaler, &pb.KnownTypes{St: &stpb.Struct{ + Fields: map[string]*stpb.Value{ + "one": {Kind: &stpb.Value_StringValue{"loneliest number"}}, + "two": {Kind: &stpb.Value_NullValue{stpb.NullValue_NULL_VALUE}}, + }, + }}, `{"st":{"one":"loneliest number","two":null}}`}, + {"empty ListValue", marshaler, &pb.KnownTypes{Lv: &stpb.ListValue{}}, `{"lv":[]}`}, + {"basic ListValue", marshaler, &pb.KnownTypes{Lv: &stpb.ListValue{Values: []*stpb.Value{ + {Kind: &stpb.Value_StringValue{"x"}}, + {Kind: &stpb.Value_NullValue{}}, + {Kind: &stpb.Value_NumberValue{3}}, + {Kind: &stpb.Value_BoolValue{true}}, + }}}, `{"lv":["x",null,3,true]}`}, + {"Timestamp", marshaler, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 21e6}}, `{"ts":"2014-05-13T16:53:20.021Z"}`}, + {"number Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NumberValue{1}}}, `{"val":1}`}, + {"null Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NullValue{stpb.NullValue_NULL_VALUE}}}, `{"val":null}`}, + {"string number value", marshaler, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_StringValue{"9223372036854775807"}}}, `{"val":"9223372036854775807"}`}, + {"list of lists Value", marshaler, &pb.KnownTypes{Val: &stpb.Value{ + Kind: &stpb.Value_ListValue{&stpb.ListValue{ + Values: []*stpb.Value{ + {Kind: &stpb.Value_StringValue{"x"}}, + {Kind: &stpb.Value_ListValue{&stpb.ListValue{ + Values: []*stpb.Value{ + {Kind: &stpb.Value_ListValue{&stpb.ListValue{ + Values: []*stpb.Value{{Kind: 
&stpb.Value_StringValue{"y"}}}, + }}}, + {Kind: &stpb.Value_StringValue{"z"}}, + }, + }}}, + }, + }}, + }}, `{"val":["x",[["y"],"z"]]}`}, + + {"DoubleValue", marshaler, &pb.KnownTypes{Dbl: &wpb.DoubleValue{Value: 1.2}}, `{"dbl":1.2}`}, + {"FloatValue", marshaler, &pb.KnownTypes{Flt: &wpb.FloatValue{Value: 1.2}}, `{"flt":1.2}`}, + {"Int64Value", marshaler, &pb.KnownTypes{I64: &wpb.Int64Value{Value: -3}}, `{"i64":"-3"}`}, + {"UInt64Value", marshaler, &pb.KnownTypes{U64: &wpb.UInt64Value{Value: 3}}, `{"u64":"3"}`}, + {"Int32Value", marshaler, &pb.KnownTypes{I32: &wpb.Int32Value{Value: -4}}, `{"i32":-4}`}, + {"UInt32Value", marshaler, &pb.KnownTypes{U32: &wpb.UInt32Value{Value: 4}}, `{"u32":4}`}, + {"BoolValue", marshaler, &pb.KnownTypes{Bool: &wpb.BoolValue{Value: true}}, `{"bool":true}`}, + {"StringValue", marshaler, &pb.KnownTypes{Str: &wpb.StringValue{Value: "plush"}}, `{"str":"plush"}`}, + {"BytesValue", marshaler, &pb.KnownTypes{Bytes: &wpb.BytesValue{Value: []byte("wow")}}, `{"bytes":"d293"}`}, +} + +func TestMarshaling(t *testing.T) { + for _, tt := range marshalingTests { + json, err := tt.marshaler.MarshalToString(tt.pb) + if err != nil { + t.Errorf("%s: marshaling error: %v", tt.desc, err) + } else if tt.json != json { + t.Errorf("%s: got [%v] want [%v]", tt.desc, json, tt.json) + } + } +} + +func TestMarshalJSONPBMarshaler(t *testing.T) { + rawJson := `{ "foo": "bar", "baz": [0, 1, 2, 3] }` + msg := dynamicMessage{rawJson: rawJson} + str, err := new(Marshaler).MarshalToString(&msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshalling JSONPBMarshaler: %v", err) + } + if str != rawJson { + t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", str, rawJson) + } +} + +func TestMarshalAnyJSONPBMarshaler(t *testing.T) { + msg := dynamicMessage{rawJson: `{ "foo": "bar", "baz": [0, 1, 2, 3] }`} + a, err := ptypes.MarshalAny(&msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshalling to Any: %v", err) + } + str, err := new(Marshaler).MarshalToString(a) + if err != nil { + t.Errorf("an unexpected error occurred when marshalling Any to JSON: %v", err) + } + // after custom marshaling, it's round-tripped through JSON decoding/encoding already, + // so the keys are sorted, whitespace is compacted, and "@type" key has been added + expected := `{"@type":"type.googleapis.com/` + dynamicMessageName + `","baz":[0,1,2,3],"foo":"bar"}` + if str != expected { + t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", str, expected) + } +} + +var unmarshalingTests = []struct { + desc string + unmarshaler Unmarshaler + json string + pb proto.Message +}{ + {"simple flat object", Unmarshaler{}, simpleObjectJSON, simpleObject}, + {"simple pretty object", Unmarshaler{}, simpleObjectPrettyJSON, simpleObject}, + {"repeated fields flat object", Unmarshaler{}, repeatsObjectJSON, repeatsObject}, + {"repeated fields pretty object", Unmarshaler{}, repeatsObjectPrettyJSON, repeatsObject}, + {"nested message/enum flat object", Unmarshaler{}, complexObjectJSON, complexObject}, + {"nested message/enum pretty object", Unmarshaler{}, complexObjectPrettyJSON, complexObject}, + {"enum-string object", Unmarshaler{}, `{"color":"BLUE"}`, &pb.Widget{Color: pb.Widget_BLUE.Enum()}}, + {"enum-value object", Unmarshaler{}, "{\n \"color\": 2\n}", &pb.Widget{Color: pb.Widget_BLUE.Enum()}}, + {"unknown field with allowed option", Unmarshaler{AllowUnknownFields: true}, `{"unknown": "foo"}`, new(pb.Simple)}, + {"proto3 enum string", Unmarshaler{}, 
`{"hilarity":"PUNS"}`, &proto3pb.Message{Hilarity: proto3pb.Message_PUNS}}, + {"proto3 enum value", Unmarshaler{}, `{"hilarity":1}`, &proto3pb.Message{Hilarity: proto3pb.Message_PUNS}}, + {"unknown enum value object", + Unmarshaler{}, + "{\n \"color\": 1000,\n \"r_color\": [\n \"RED\"\n ]\n}", + &pb.Widget{Color: pb.Widget_Color(1000).Enum(), RColor: []pb.Widget_Color{pb.Widget_RED}}}, + {"repeated proto3 enum", Unmarshaler{}, `{"rFunny":["PUNS","SLAPSTICK"]}`, + &proto3pb.Message{RFunny: []proto3pb.Message_Humour{ + proto3pb.Message_PUNS, + proto3pb.Message_SLAPSTICK, + }}}, + {"repeated proto3 enum as int", Unmarshaler{}, `{"rFunny":[1,2]}`, + &proto3pb.Message{RFunny: []proto3pb.Message_Humour{ + proto3pb.Message_PUNS, + proto3pb.Message_SLAPSTICK, + }}}, + {"repeated proto3 enum as mix of strings and ints", Unmarshaler{}, `{"rFunny":["PUNS",2]}`, + &proto3pb.Message{RFunny: []proto3pb.Message_Humour{ + proto3pb.Message_PUNS, + proto3pb.Message_SLAPSTICK, + }}}, + {"unquoted int64 object", Unmarshaler{}, `{"oInt64":-314}`, &pb.Simple{OInt64: proto.Int64(-314)}}, + {"unquoted uint64 object", Unmarshaler{}, `{"oUint64":123}`, &pb.Simple{OUint64: proto.Uint64(123)}}, + {"NaN", Unmarshaler{}, `{"oDouble":"NaN"}`, &pb.Simple{ODouble: proto.Float64(math.NaN())}}, + {"Inf", Unmarshaler{}, `{"oFloat":"Infinity"}`, &pb.Simple{OFloat: proto.Float32(float32(math.Inf(1)))}}, + {"-Inf", Unmarshaler{}, `{"oDouble":"-Infinity"}`, &pb.Simple{ODouble: proto.Float64(math.Inf(-1))}}, + {"map", Unmarshaler{}, `{"nummy":{"1":2,"3":4}}`, &pb.Mappy{Nummy: map[int64]int32{1: 2, 3: 4}}}, + {"map", Unmarshaler{}, `{"strry":{"\"one\"":"two","three":"four"}}`, &pb.Mappy{Strry: map[string]string{`"one"`: "two", "three": "four"}}}, + {"map", Unmarshaler{}, `{"objjy":{"1":{"dub":1}}}`, &pb.Mappy{Objjy: map[int32]*pb.Simple3{1: {Dub: 1}}}}, + {"proto2 extension", Unmarshaler{}, realNumberJSON, realNumber}, + {"Any with message", Unmarshaler{}, anySimpleJSON, anySimple}, + {"Any with message and indent", Unmarshaler{}, anySimplePrettyJSON, anySimple}, + {"Any with WKT", Unmarshaler{}, anyWellKnownJSON, anyWellKnown}, + {"Any with WKT and indent", Unmarshaler{}, anyWellKnownPrettyJSON, anyWellKnown}, + // TODO: This is broken. 
+ //{"map", Unmarshaler{}, `{"enumy":{"XIV":"ROMAN"}`, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}}, + {"map", Unmarshaler{}, `{"enumy":{"XIV":2}}`, &pb.Mappy{Enumy: map[string]pb.Numeral{"XIV": pb.Numeral_ROMAN}}}, + {"oneof", Unmarshaler{}, `{"salary":31000}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Salary{31000}}}, + {"oneof spec name", Unmarshaler{}, `{"Country":"Australia"}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Country{"Australia"}}}, + {"oneof orig_name", Unmarshaler{}, `{"Country":"Australia"}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_Country{"Australia"}}}, + {"oneof spec name2", Unmarshaler{}, `{"homeAddress":"Australia"}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_HomeAddress{"Australia"}}}, + {"oneof orig_name2", Unmarshaler{}, `{"home_address":"Australia"}`, &pb.MsgWithOneof{Union: &pb.MsgWithOneof_HomeAddress{"Australia"}}}, + {"orig_name input", Unmarshaler{}, `{"o_bool":true}`, &pb.Simple{OBool: proto.Bool(true)}}, + {"camelName input", Unmarshaler{}, `{"oBool":true}`, &pb.Simple{OBool: proto.Bool(true)}}, + + {"Duration", Unmarshaler{}, `{"dur":"3.000s"}`, &pb.KnownTypes{Dur: &durpb.Duration{Seconds: 3}}}, + {"null Duration", Unmarshaler{}, `{"dur":null}`, &pb.KnownTypes{Dur: nil}}, + {"Timestamp", Unmarshaler{}, `{"ts":"2014-05-13T16:53:20.021Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: 14e8, Nanos: 21e6}}}, + {"PreEpochTimestamp", Unmarshaler{}, `{"ts":"1969-12-31T23:59:58.999999995Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: -2, Nanos: 999999995}}}, + {"ZeroTimeTimestamp", Unmarshaler{}, `{"ts":"0001-01-01T00:00:00Z"}`, &pb.KnownTypes{Ts: &tspb.Timestamp{Seconds: -62135596800, Nanos: 0}}}, + {"null Timestamp", Unmarshaler{}, `{"ts":null}`, &pb.KnownTypes{Ts: nil}}, + {"null Struct", Unmarshaler{}, `{"st": null}`, &pb.KnownTypes{St: nil}}, + {"empty Struct", Unmarshaler{}, `{"st": {}}`, &pb.KnownTypes{St: &stpb.Struct{}}}, + {"basic Struct", Unmarshaler{}, `{"st": {"a": "x", "b": null, "c": 3, "d": true}}`, &pb.KnownTypes{St: &stpb.Struct{Fields: map[string]*stpb.Value{ + "a": {Kind: &stpb.Value_StringValue{"x"}}, + "b": {Kind: &stpb.Value_NullValue{}}, + "c": {Kind: &stpb.Value_NumberValue{3}}, + "d": {Kind: &stpb.Value_BoolValue{true}}, + }}}}, + {"nested Struct", Unmarshaler{}, `{"st": {"a": {"b": 1, "c": [{"d": true}, "f"]}}}`, &pb.KnownTypes{St: &stpb.Struct{Fields: map[string]*stpb.Value{ + "a": {Kind: &stpb.Value_StructValue{&stpb.Struct{Fields: map[string]*stpb.Value{ + "b": {Kind: &stpb.Value_NumberValue{1}}, + "c": {Kind: &stpb.Value_ListValue{&stpb.ListValue{Values: []*stpb.Value{ + {Kind: &stpb.Value_StructValue{&stpb.Struct{Fields: map[string]*stpb.Value{"d": {Kind: &stpb.Value_BoolValue{true}}}}}}, + {Kind: &stpb.Value_StringValue{"f"}}, + }}}}, + }}}}, + }}}}, + {"null ListValue", Unmarshaler{}, `{"lv": null}`, &pb.KnownTypes{Lv: nil}}, + {"empty ListValue", Unmarshaler{}, `{"lv": []}`, &pb.KnownTypes{Lv: &stpb.ListValue{}}}, + {"basic ListValue", Unmarshaler{}, `{"lv": ["x", null, 3, true]}`, &pb.KnownTypes{Lv: &stpb.ListValue{Values: []*stpb.Value{ + {Kind: &stpb.Value_StringValue{"x"}}, + {Kind: &stpb.Value_NullValue{}}, + {Kind: &stpb.Value_NumberValue{3}}, + {Kind: &stpb.Value_BoolValue{true}}, + }}}}, + {"number Value", Unmarshaler{}, `{"val":1}`, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NumberValue{1}}}}, + {"null Value", Unmarshaler{}, `{"val":null}`, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_NullValue{stpb.NullValue_NULL_VALUE}}}}, + {"bool Value", Unmarshaler{}, `{"val":true}`, 
&pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_BoolValue{true}}}}, + {"string Value", Unmarshaler{}, `{"val":"x"}`, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_StringValue{"x"}}}}, + {"string number value", Unmarshaler{}, `{"val":"9223372036854775807"}`, &pb.KnownTypes{Val: &stpb.Value{Kind: &stpb.Value_StringValue{"9223372036854775807"}}}}, + {"list of lists Value", Unmarshaler{}, `{"val":["x", [["y"], "z"]]}`, &pb.KnownTypes{Val: &stpb.Value{ + Kind: &stpb.Value_ListValue{&stpb.ListValue{ + Values: []*stpb.Value{ + {Kind: &stpb.Value_StringValue{"x"}}, + {Kind: &stpb.Value_ListValue{&stpb.ListValue{ + Values: []*stpb.Value{ + {Kind: &stpb.Value_ListValue{&stpb.ListValue{ + Values: []*stpb.Value{{Kind: &stpb.Value_StringValue{"y"}}}, + }}}, + {Kind: &stpb.Value_StringValue{"z"}}, + }, + }}}, + }, + }}}}}, + + {"DoubleValue", Unmarshaler{}, `{"dbl":1.2}`, &pb.KnownTypes{Dbl: &wpb.DoubleValue{Value: 1.2}}}, + {"FloatValue", Unmarshaler{}, `{"flt":1.2}`, &pb.KnownTypes{Flt: &wpb.FloatValue{Value: 1.2}}}, + {"Int64Value", Unmarshaler{}, `{"i64":"-3"}`, &pb.KnownTypes{I64: &wpb.Int64Value{Value: -3}}}, + {"UInt64Value", Unmarshaler{}, `{"u64":"3"}`, &pb.KnownTypes{U64: &wpb.UInt64Value{Value: 3}}}, + {"Int32Value", Unmarshaler{}, `{"i32":-4}`, &pb.KnownTypes{I32: &wpb.Int32Value{Value: -4}}}, + {"UInt32Value", Unmarshaler{}, `{"u32":4}`, &pb.KnownTypes{U32: &wpb.UInt32Value{Value: 4}}}, + {"BoolValue", Unmarshaler{}, `{"bool":true}`, &pb.KnownTypes{Bool: &wpb.BoolValue{Value: true}}}, + {"StringValue", Unmarshaler{}, `{"str":"plush"}`, &pb.KnownTypes{Str: &wpb.StringValue{Value: "plush"}}}, + {"BytesValue", Unmarshaler{}, `{"bytes":"d293"}`, &pb.KnownTypes{Bytes: &wpb.BytesValue{Value: []byte("wow")}}}, + + // Ensure that `null` as a value ends up with a nil pointer instead of a [type]Value struct. + {"null DoubleValue", Unmarshaler{}, `{"dbl":null}`, &pb.KnownTypes{Dbl: nil}}, + {"null FloatValue", Unmarshaler{}, `{"flt":null}`, &pb.KnownTypes{Flt: nil}}, + {"null Int64Value", Unmarshaler{}, `{"i64":null}`, &pb.KnownTypes{I64: nil}}, + {"null UInt64Value", Unmarshaler{}, `{"u64":null}`, &pb.KnownTypes{U64: nil}}, + {"null Int32Value", Unmarshaler{}, `{"i32":null}`, &pb.KnownTypes{I32: nil}}, + {"null UInt32Value", Unmarshaler{}, `{"u32":null}`, &pb.KnownTypes{U32: nil}}, + {"null BoolValue", Unmarshaler{}, `{"bool":null}`, &pb.KnownTypes{Bool: nil}}, + {"null StringValue", Unmarshaler{}, `{"str":null}`, &pb.KnownTypes{Str: nil}}, + {"null BytesValue", Unmarshaler{}, `{"bytes":null}`, &pb.KnownTypes{Bytes: nil}}, +} + +func TestUnmarshaling(t *testing.T) { + for _, tt := range unmarshalingTests { + // Make a new instance of the type of our expected object. + p := reflect.New(reflect.TypeOf(tt.pb).Elem()).Interface().(proto.Message) + + err := tt.unmarshaler.Unmarshal(strings.NewReader(tt.json), p) + if err != nil { + t.Errorf("%s: %v", tt.desc, err) + continue + } + + // For easier diffs, compare text strings of the protos. 
+ exp := proto.MarshalTextString(tt.pb) + act := proto.MarshalTextString(p) + if string(exp) != string(act) { + t.Errorf("%s: got [%s] want [%s]", tt.desc, act, exp) + } + } +} + +func TestUnmarshalNullArray(t *testing.T) { + var repeats pb.Repeats + if err := UnmarshalString(`{"rBool":null}`, &repeats); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(repeats, pb.Repeats{}) { + t.Errorf("got non-nil fields in [%#v]", repeats) + } +} + +func TestUnmarshalNullObject(t *testing.T) { + var maps pb.Maps + if err := UnmarshalString(`{"mInt64Str":null}`, &maps); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(maps, pb.Maps{}) { + t.Errorf("got non-nil fields in [%#v]", maps) + } +} + +func TestUnmarshalNext(t *testing.T) { + // We only need to check against a few, not all of them. + tests := unmarshalingTests[:5] + + // Create a buffer with many concatenated JSON objects. + var b bytes.Buffer + for _, tt := range tests { + b.WriteString(tt.json) + } + + dec := json.NewDecoder(&b) + for _, tt := range tests { + // Make a new instance of the type of our expected object. + p := reflect.New(reflect.TypeOf(tt.pb).Elem()).Interface().(proto.Message) + + err := tt.unmarshaler.UnmarshalNext(dec, p) + if err != nil { + t.Errorf("%s: %v", tt.desc, err) + continue + } + + // For easier diffs, compare text strings of the protos. + exp := proto.MarshalTextString(tt.pb) + act := proto.MarshalTextString(p) + if string(exp) != string(act) { + t.Errorf("%s: got [%s] want [%s]", tt.desc, act, exp) + } + } + + p := &pb.Simple{} + err := new(Unmarshaler).UnmarshalNext(dec, p) + if err != io.EOF { + t.Errorf("eof: got %v, expected io.EOF", err) + } +} + +var unmarshalingShouldError = []struct { + desc string + in string + pb proto.Message +}{ + {"a value", "666", new(pb.Simple)}, + {"gibberish", "{adskja123;l23=-=", new(pb.Simple)}, + {"unknown field", `{"unknown": "foo"}`, new(pb.Simple)}, + {"unknown enum name", `{"hilarity":"DAVE"}`, new(proto3pb.Message)}, +} + +func TestUnmarshalingBadInput(t *testing.T) { + for _, tt := range unmarshalingShouldError { + err := UnmarshalString(tt.in, tt.pb) + if err == nil { + t.Errorf("an error was expected when parsing %q instead of an object", tt.desc) + } + } +} + +type funcResolver func(turl string) (proto.Message, error) + +func (fn funcResolver) Resolve(turl string) (proto.Message, error) { + return fn(turl) +} + +func TestAnyWithCustomResolver(t *testing.T) { + var resolvedTypeUrls []string + resolver := funcResolver(func(turl string) (proto.Message, error) { + resolvedTypeUrls = append(resolvedTypeUrls, turl) + return new(pb.Simple), nil + }) + msg := &pb.Simple{ + OBytes: []byte{1, 2, 3, 4}, + OBool: proto.Bool(true), + OString: proto.String("foobar"), + OInt64: proto.Int64(1020304), + } + msgBytes, err := proto.Marshal(msg) + if err != nil { + t.Errorf("an unexpected error occurred when marshaling message: %v", err) + } + // make an Any with a type URL that won't resolve w/out custom resolver + any := &anypb.Any{ + TypeUrl: "https://foobar.com/some.random.MessageKind", + Value: msgBytes, + } + + m := Marshaler{AnyResolver: resolver} + js, err := m.MarshalToString(any) + if err != nil { + t.Errorf("an unexpected error occurred when marshaling any to JSON: %v", err) + } + if len(resolvedTypeUrls) != 1 { + t.Errorf("custom resolver was not invoked during marshaling") + } else if resolvedTypeUrls[0] != "https://foobar.com/some.random.MessageKind" { + t.Errorf("custom resolver was invoked with wrong URL: got %q, wanted %q", resolvedTypeUrls[0], 
"https://foobar.com/some.random.MessageKind") + } + wanted := `{"@type":"https://foobar.com/some.random.MessageKind","oBool":true,"oInt64":"1020304","oString":"foobar","oBytes":"AQIDBA=="}` + if js != wanted { + t.Errorf("marshalling JSON produced incorrect output: got %s, wanted %s", js, wanted) + } + + u := Unmarshaler{AnyResolver: resolver} + roundTrip := &anypb.Any{} + err = u.Unmarshal(bytes.NewReader([]byte(js)), roundTrip) + if err != nil { + t.Errorf("an unexpected error occurred when unmarshaling any from JSON: %v", err) + } + if len(resolvedTypeUrls) != 2 { + t.Errorf("custom resolver was not invoked during marshaling") + } else if resolvedTypeUrls[1] != "https://foobar.com/some.random.MessageKind" { + t.Errorf("custom resolver was invoked with wrong URL: got %q, wanted %q", resolvedTypeUrls[1], "https://foobar.com/some.random.MessageKind") + } + if !proto.Equal(any, roundTrip) { + t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", roundTrip, any) + } +} + +func TestUnmarshalJSONPBUnmarshaler(t *testing.T) { + rawJson := `{ "foo": "bar", "baz": [0, 1, 2, 3] }` + var msg dynamicMessage + if err := Unmarshal(strings.NewReader(rawJson), &msg); err != nil { + t.Errorf("an unexpected error occurred when parsing into JSONPBUnmarshaler: %v", err) + } + if msg.rawJson != rawJson { + t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", msg.rawJson, rawJson) + } +} + +func TestUnmarshalNullWithJSONPBUnmarshaler(t *testing.T) { + rawJson := `{"stringField":null}` + var ptrFieldMsg ptrFieldMessage + if err := Unmarshal(strings.NewReader(rawJson), &ptrFieldMsg); err != nil { + t.Errorf("unmarshal error: %v", err) + } + + want := ptrFieldMessage{StringField: &stringField{IsSet: true, StringValue: "null"}} + if !proto.Equal(&ptrFieldMsg, &want) { + t.Errorf("unmarshal result StringField: got %v, want %v", ptrFieldMsg, want) + } +} + +func TestUnmarshalAnyJSONPBUnmarshaler(t *testing.T) { + rawJson := `{ "@type": "blah.com/` + dynamicMessageName + `", "foo": "bar", "baz": [0, 1, 2, 3] }` + var got anypb.Any + if err := Unmarshal(strings.NewReader(rawJson), &got); err != nil { + t.Errorf("an unexpected error occurred when parsing into JSONPBUnmarshaler: %v", err) + } + + dm := &dynamicMessage{rawJson: `{"baz":[0,1,2,3],"foo":"bar"}`} + var want anypb.Any + if b, err := proto.Marshal(dm); err != nil { + t.Errorf("an unexpected error occurred when marshaling message: %v", err) + } else { + want.TypeUrl = "blah.com/" + dynamicMessageName + want.Value = b + } + + if !proto.Equal(&got, &want) { + t.Errorf("message contents not set correctly after unmarshalling JSON: got %s, wanted %s", got, want) + } +} + +const ( + dynamicMessageName = "google.protobuf.jsonpb.testing.dynamicMessage" +) + +func init() { + // we register the custom type below so that we can use it in Any types + proto.RegisterType((*dynamicMessage)(nil), dynamicMessageName) +} + +type ptrFieldMessage struct { + StringField *stringField `protobuf:"bytes,1,opt,name=stringField"` +} + +func (m *ptrFieldMessage) Reset() { +} + +func (m *ptrFieldMessage) String() string { + return m.StringField.StringValue +} + +func (m *ptrFieldMessage) ProtoMessage() { +} + +type stringField struct { + IsSet bool `protobuf:"varint,1,opt,name=isSet"` + StringValue string `protobuf:"bytes,2,opt,name=stringValue"` +} + +func (s *stringField) Reset() { +} + +func (s *stringField) String() string { + return s.StringValue +} + +func (s *stringField) ProtoMessage() { +} + +func (s 
*stringField) UnmarshalJSONPB(jum *Unmarshaler, js []byte) error { + s.IsSet = true + s.StringValue = string(js) + return nil +} + +// dynamicMessage implements protobuf.Message but is not a normal generated message type. +// It provides implementations of JSONPBMarshaler and JSONPBUnmarshaler for JSON support. +type dynamicMessage struct { + rawJson string `protobuf:"bytes,1,opt,name=rawJson"` +} + +func (m *dynamicMessage) Reset() { + m.rawJson = "{}" +} + +func (m *dynamicMessage) String() string { + return m.rawJson +} + +func (m *dynamicMessage) ProtoMessage() { +} + +func (m *dynamicMessage) MarshalJSONPB(jm *Marshaler) ([]byte, error) { + return []byte(m.rawJson), nil +} + +func (m *dynamicMessage) UnmarshalJSONPB(jum *Unmarshaler, js []byte) error { + m.rawJson = string(js) + return nil +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile new file mode 100644 index 000000000..eeda8ae53 --- /dev/null +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/Makefile @@ -0,0 +1,33 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2015 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +regenerate: + protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any,Mgoogle/protobuf/duration.proto=github.com/golang/protobuf/ptypes/duration,Mgoogle/protobuf/struct.proto=github.com/golang/protobuf/ptypes/struct,Mgoogle/protobuf/timestamp.proto=github.com/golang/protobuf/ptypes/timestamp,Mgoogle/protobuf/wrappers.proto=github.com/golang/protobuf/ptypes/wrappers:. 
*.proto diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go new file mode 100644 index 000000000..ebb180e88 --- /dev/null +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.pb.go @@ -0,0 +1,266 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: more_test_objects.proto + +/* +Package jsonpb is a generated protocol buffer package. + +It is generated from these files: + more_test_objects.proto + test_objects.proto + +It has these top-level messages: + Simple3 + SimpleSlice3 + SimpleMap3 + SimpleNull3 + Mappy + Simple + NonFinites + Repeats + Widget + Maps + MsgWithOneof + Real + Complex + KnownTypes +*/ +package jsonpb + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Numeral int32 + +const ( + Numeral_UNKNOWN Numeral = 0 + Numeral_ARABIC Numeral = 1 + Numeral_ROMAN Numeral = 2 +) + +var Numeral_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ARABIC", + 2: "ROMAN", +} +var Numeral_value = map[string]int32{ + "UNKNOWN": 0, + "ARABIC": 1, + "ROMAN": 2, +} + +func (x Numeral) String() string { + return proto.EnumName(Numeral_name, int32(x)) +} +func (Numeral) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +type Simple3 struct { + Dub float64 `protobuf:"fixed64,1,opt,name=dub" json:"dub,omitempty"` +} + +func (m *Simple3) Reset() { *m = Simple3{} } +func (m *Simple3) String() string { return proto.CompactTextString(m) } +func (*Simple3) ProtoMessage() {} +func (*Simple3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Simple3) GetDub() float64 { + if m != nil { + return m.Dub + } + return 0 +} + +type SimpleSlice3 struct { + Slices []string `protobuf:"bytes,1,rep,name=slices" json:"slices,omitempty"` +} + +func (m *SimpleSlice3) Reset() { *m = SimpleSlice3{} } +func (m *SimpleSlice3) String() string { return proto.CompactTextString(m) } +func (*SimpleSlice3) ProtoMessage() {} +func (*SimpleSlice3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *SimpleSlice3) GetSlices() []string { + if m != nil { + return m.Slices + } + return nil +} + +type SimpleMap3 struct { + Stringy map[string]string `protobuf:"bytes,1,rep,name=stringy" json:"stringy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *SimpleMap3) Reset() { *m = SimpleMap3{} } +func (m *SimpleMap3) String() string { return proto.CompactTextString(m) } +func (*SimpleMap3) ProtoMessage() {} +func (*SimpleMap3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *SimpleMap3) GetStringy() map[string]string { + if m != nil { + return m.Stringy + } + return nil +} + +type SimpleNull3 struct { + Simple *Simple3 `protobuf:"bytes,1,opt,name=simple" json:"simple,omitempty"` +} + +func (m *SimpleNull3) Reset() { *m = SimpleNull3{} } +func (m *SimpleNull3) String() string { return proto.CompactTextString(m) } +func 
(*SimpleNull3) ProtoMessage() {} +func (*SimpleNull3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *SimpleNull3) GetSimple() *Simple3 { + if m != nil { + return m.Simple + } + return nil +} + +type Mappy struct { + Nummy map[int64]int32 `protobuf:"bytes,1,rep,name=nummy" json:"nummy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Strry map[string]string `protobuf:"bytes,2,rep,name=strry" json:"strry,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Objjy map[int32]*Simple3 `protobuf:"bytes,3,rep,name=objjy" json:"objjy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Buggy map[int64]string `protobuf:"bytes,4,rep,name=buggy" json:"buggy,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Booly map[bool]bool `protobuf:"bytes,5,rep,name=booly" json:"booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + Enumy map[string]Numeral `protobuf:"bytes,6,rep,name=enumy" json:"enumy,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=jsonpb.Numeral"` + S32Booly map[int32]bool `protobuf:"bytes,7,rep,name=s32booly" json:"s32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + S64Booly map[int64]bool `protobuf:"bytes,8,rep,name=s64booly" json:"s64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + U32Booly map[uint32]bool `protobuf:"bytes,9,rep,name=u32booly" json:"u32booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + U64Booly map[uint64]bool `protobuf:"bytes,10,rep,name=u64booly" json:"u64booly,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` +} + +func (m *Mappy) Reset() { *m = Mappy{} } +func (m *Mappy) String() string { return proto.CompactTextString(m) } +func (*Mappy) ProtoMessage() {} +func (*Mappy) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *Mappy) GetNummy() map[int64]int32 { + if m != nil { + return m.Nummy + } + return nil +} + +func (m *Mappy) GetStrry() map[string]string { + if m != nil { + return m.Strry + } + return nil +} + +func (m *Mappy) GetObjjy() map[int32]*Simple3 { + if m != nil { + return m.Objjy + } + return nil +} + +func (m *Mappy) GetBuggy() map[int64]string { + if m != nil { + return m.Buggy + } + return nil +} + +func (m *Mappy) GetBooly() map[bool]bool { + if m != nil { + return m.Booly + } + return nil +} + +func (m *Mappy) GetEnumy() map[string]Numeral { + if m != nil { + return m.Enumy + } + return nil +} + +func (m *Mappy) GetS32Booly() map[int32]bool { + if m != nil { + return m.S32Booly + } + return nil +} + +func (m *Mappy) GetS64Booly() map[int64]bool { + if m != nil { + return m.S64Booly + } + return nil +} + +func (m *Mappy) GetU32Booly() map[uint32]bool { + if m != nil { + return m.U32Booly + } + return nil +} + +func (m *Mappy) GetU64Booly() map[uint64]bool { + if m != nil { + return m.U64Booly + } + return nil +} + +func init() { + proto.RegisterType((*Simple3)(nil), "jsonpb.Simple3") + proto.RegisterType((*SimpleSlice3)(nil), "jsonpb.SimpleSlice3") + proto.RegisterType((*SimpleMap3)(nil), "jsonpb.SimpleMap3") + proto.RegisterType((*SimpleNull3)(nil), "jsonpb.SimpleNull3") + proto.RegisterType((*Mappy)(nil), "jsonpb.Mappy") + proto.RegisterEnum("jsonpb.Numeral", Numeral_name, Numeral_value) +} + 
+func init() { proto.RegisterFile("more_test_objects.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 526 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdd, 0x6b, 0xdb, 0x3c, + 0x14, 0x87, 0x5f, 0x27, 0xf5, 0xd7, 0x49, 0xfb, 0x2e, 0x88, 0xb1, 0x99, 0xf4, 0x62, 0xc5, 0xb0, + 0xad, 0x0c, 0xe6, 0x8b, 0x78, 0x74, 0x5d, 0x77, 0x95, 0x8e, 0x5e, 0x94, 0x11, 0x07, 0x1c, 0xc2, + 0x2e, 0x4b, 0xdc, 0x99, 0x90, 0xcc, 0x5f, 0xd8, 0xd6, 0xc0, 0xd7, 0xfb, 0xbb, 0x07, 0xe3, 0x48, + 0x72, 0x2d, 0x07, 0x85, 0x6c, 0x77, 0x52, 0x7e, 0xcf, 0xe3, 0x73, 0x24, 0x1d, 0x02, 0x2f, 0xd3, + 0xbc, 0x8c, 0x1f, 0xea, 0xb8, 0xaa, 0x1f, 0xf2, 0x68, 0x17, 0x3f, 0xd6, 0x95, 0x57, 0x94, 0x79, + 0x9d, 0x13, 0x63, 0x57, 0xe5, 0x59, 0x11, 0xb9, 0xe7, 0x60, 0x2e, 0xb7, 0x69, 0x91, 0xc4, 0x3e, + 0x19, 0xc3, 0xf0, 0x3b, 0x8d, 0x1c, 0xed, 0x42, 0xbb, 0xd4, 0x42, 0x5c, 0xba, 0x6f, 0xe0, 0x94, + 0x87, 0xcb, 0x64, 0xfb, 0x18, 0xfb, 0xe4, 0x05, 0x18, 0x15, 0xae, 0x2a, 0x47, 0xbb, 0x18, 0x5e, + 0xda, 0xa1, 0xd8, 0xb9, 0xbf, 0x34, 0x00, 0x0e, 0xce, 0xd7, 0x85, 0x4f, 0x3e, 0x81, 0x59, 0xd5, + 0xe5, 0x36, 0xdb, 0x34, 0x8c, 0x1b, 0x4d, 0x5f, 0x79, 0xbc, 0x9a, 0xd7, 0x41, 0xde, 0x92, 0x13, + 0x77, 0x59, 0x5d, 0x36, 0x61, 0xcb, 0x4f, 0x6e, 0xe0, 0x54, 0x0e, 0xb0, 0xa7, 0x1f, 0x71, 0xc3, + 0x7a, 0xb2, 0x43, 0x5c, 0x92, 0xe7, 0xa0, 0xff, 0x5c, 0x27, 0x34, 0x76, 0x06, 0xec, 0x37, 0xbe, + 0xb9, 0x19, 0x5c, 0x6b, 0xee, 0x15, 0x8c, 0xf8, 0xf7, 0x03, 0x9a, 0x24, 0x3e, 0x79, 0x0b, 0x46, + 0xc5, 0xb6, 0xcc, 0x1e, 0x4d, 0x9f, 0xf5, 0x9b, 0xf0, 0x43, 0x11, 0xbb, 0xbf, 0x2d, 0xd0, 0xe7, + 0xeb, 0xa2, 0x68, 0x88, 0x07, 0x7a, 0x46, 0xd3, 0xb4, 0x6d, 0xdb, 0x69, 0x0d, 0x96, 0x7a, 0x01, + 0x46, 0xbc, 0x5f, 0x8e, 0x21, 0x5f, 0xd5, 0x65, 0xd9, 0x38, 0x03, 0x15, 0xbf, 0xc4, 0x48, 0xf0, + 0x0c, 0x43, 0x3e, 0x8f, 0x76, 0xbb, 0xc6, 0x19, 0xaa, 0xf8, 0x05, 0x46, 0x82, 0x67, 0x18, 0xf2, + 0x11, 0xdd, 0x6c, 0x1a, 0xe7, 0x44, 0xc5, 0xdf, 0x62, 0x24, 0x78, 0x86, 0x31, 0x3e, 0xcf, 0x93, + 0xc6, 0xd1, 0x95, 0x3c, 0x46, 0x2d, 0x8f, 0x6b, 0xe4, 0xe3, 0x8c, 0xa6, 0x8d, 0x63, 0xa8, 0xf8, + 0x3b, 0x8c, 0x04, 0xcf, 0x30, 0xf2, 0x11, 0xac, 0xca, 0x9f, 0xf2, 0x12, 0x26, 0x53, 0xce, 0xf7, + 0x8e, 0x2c, 0x52, 0x6e, 0x3d, 0xc1, 0x4c, 0xbc, 0xfa, 0xc0, 0x45, 0x4b, 0x29, 0x8a, 0xb4, 0x15, + 0xc5, 0x16, 0x45, 0xda, 0x56, 0xb4, 0x55, 0xe2, 0xaa, 0x5f, 0x91, 0x4a, 0x15, 0x69, 0x5b, 0x11, + 0x94, 0x62, 0xbf, 0x62, 0x0b, 0x4f, 0xae, 0x01, 0xba, 0x87, 0x96, 0xe7, 0x6f, 0xa8, 0x98, 0x3f, + 0x5d, 0x9a, 0x3f, 0x34, 0xbb, 0x27, 0xff, 0x97, 0xc9, 0x9d, 0xdc, 0x03, 0x74, 0x8f, 0x2f, 0x9b, + 0x3a, 0x37, 0x5f, 0xcb, 0xa6, 0x62, 0x92, 0xfb, 0x4d, 0x74, 0x73, 0x71, 0xac, 0x7d, 0x7b, 0xdf, + 0x7c, 0xba, 0x10, 0xd9, 0xb4, 0x14, 0xa6, 0xb5, 0xd7, 0x7e, 0x37, 0x2b, 0x8a, 0x83, 0xf7, 0xda, + 0xff, 0xbf, 0x6b, 0x3f, 0xa0, 0x69, 0x5c, 0xae, 0x13, 0xf9, 0x53, 0x9f, 0xe1, 0xac, 0x37, 0x43, + 0x8a, 0xcb, 0x38, 0xdc, 0x07, 0xca, 0xf2, 0xab, 0x1e, 0x3b, 0xfe, 0xbe, 0xbc, 0x3a, 0x54, 0xf9, + 0xec, 0x6f, 0xe4, 0x43, 0x95, 0x4f, 0x8e, 0xc8, 0xef, 0xde, 0x83, 0x29, 0x6e, 0x82, 0x8c, 0xc0, + 0x5c, 0x05, 0x5f, 0x83, 0xc5, 0xb7, 0x60, 0xfc, 0x1f, 0x01, 0x30, 0x66, 0xe1, 0xec, 0xf6, 0xfe, + 0xcb, 0x58, 0x23, 0x36, 0xe8, 0xe1, 0x62, 0x3e, 0x0b, 0xc6, 0x83, 0xc8, 0x60, 0x7f, 0xe0, 0xfe, + 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x84, 0x34, 0xaf, 0xdb, 0x05, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.proto 
b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.proto
new file mode 100644
index 000000000..d254fa5fa
--- /dev/null
+++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/more_test_objects.proto
@@ -0,0 +1,69 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2015 The Go Authors. All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+
+package jsonpb;
+
+message Simple3 {
+  double dub = 1;
+}
+
+message SimpleSlice3 {
+  repeated string slices = 1;
+}
+
+message SimpleMap3 {
+  map<string, string> stringy = 1;
+}
+
+message SimpleNull3 {
+  Simple3 simple = 1;
+}
+
+enum Numeral {
+  UNKNOWN = 0;
+  ARABIC = 1;
+  ROMAN = 2;
+}
+
+message Mappy {
+  map<int64, int32> nummy = 1;
+  map<string, string> strry = 2;
+  map<int32, Simple3> objjy = 3;
+  map<int64, string> buggy = 4;
+  map<bool, bool> booly = 5;
+  map<string, Numeral> enumy = 6;
+  map<int32, bool> s32booly = 7;
+  map<int64, bool> s64booly = 8;
+  map<uint32, bool> u32booly = 9;
+  map<uint64, bool> u64booly = 10;
+}
diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go
new file mode 100644
index 000000000..d413d740d
--- /dev/null
+++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.pb.go
@@ -0,0 +1,852 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: test_objects.proto
+
+package jsonpb
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import google_protobuf "github.com/golang/protobuf/ptypes/any"
+import google_protobuf1 "github.com/golang/protobuf/ptypes/duration"
+import google_protobuf2 "github.com/golang/protobuf/ptypes/struct"
+import google_protobuf3 "github.com/golang/protobuf/ptypes/timestamp"
+import google_protobuf4 "github.com/golang/protobuf/ptypes/wrappers"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +type Widget_Color int32 + +const ( + Widget_RED Widget_Color = 0 + Widget_GREEN Widget_Color = 1 + Widget_BLUE Widget_Color = 2 +) + +var Widget_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var Widget_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x Widget_Color) Enum() *Widget_Color { + p := new(Widget_Color) + *p = x + return p +} +func (x Widget_Color) String() string { + return proto.EnumName(Widget_Color_name, int32(x)) +} +func (x *Widget_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Widget_Color_value, data, "Widget_Color") + if err != nil { + return err + } + *x = Widget_Color(value) + return nil +} +func (Widget_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{3, 0} } + +// Test message for holding primitive types. +type Simple struct { + OBool *bool `protobuf:"varint,1,opt,name=o_bool,json=oBool" json:"o_bool,omitempty"` + OInt32 *int32 `protobuf:"varint,2,opt,name=o_int32,json=oInt32" json:"o_int32,omitempty"` + OInt64 *int64 `protobuf:"varint,3,opt,name=o_int64,json=oInt64" json:"o_int64,omitempty"` + OUint32 *uint32 `protobuf:"varint,4,opt,name=o_uint32,json=oUint32" json:"o_uint32,omitempty"` + OUint64 *uint64 `protobuf:"varint,5,opt,name=o_uint64,json=oUint64" json:"o_uint64,omitempty"` + OSint32 *int32 `protobuf:"zigzag32,6,opt,name=o_sint32,json=oSint32" json:"o_sint32,omitempty"` + OSint64 *int64 `protobuf:"zigzag64,7,opt,name=o_sint64,json=oSint64" json:"o_sint64,omitempty"` + OFloat *float32 `protobuf:"fixed32,8,opt,name=o_float,json=oFloat" json:"o_float,omitempty"` + ODouble *float64 `protobuf:"fixed64,9,opt,name=o_double,json=oDouble" json:"o_double,omitempty"` + OString *string `protobuf:"bytes,10,opt,name=o_string,json=oString" json:"o_string,omitempty"` + OBytes []byte `protobuf:"bytes,11,opt,name=o_bytes,json=oBytes" json:"o_bytes,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Simple) Reset() { *m = Simple{} } +func (m *Simple) String() string { return proto.CompactTextString(m) } +func (*Simple) ProtoMessage() {} +func (*Simple) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} } + +func (m *Simple) GetOBool() bool { + if m != nil && m.OBool != nil { + return *m.OBool + } + return false +} + +func (m *Simple) GetOInt32() int32 { + if m != nil && m.OInt32 != nil { + return *m.OInt32 + } + return 0 +} + +func (m *Simple) GetOInt64() int64 { + if m != nil && m.OInt64 != nil { + return *m.OInt64 + } + return 0 +} + +func (m *Simple) GetOUint32() uint32 { + if m != nil && m.OUint32 != nil { + return *m.OUint32 + } + return 0 +} + +func (m *Simple) GetOUint64() uint64 { + if m != nil && m.OUint64 != nil { + return *m.OUint64 + } + return 0 +} + +func (m *Simple) GetOSint32() int32 { + if m != nil && m.OSint32 != nil { + return *m.OSint32 + } + return 0 +} + +func (m *Simple) GetOSint64() int64 { + if m != nil && m.OSint64 != nil { + return *m.OSint64 + } + return 0 +} + +func (m *Simple) GetOFloat() float32 { + if m != nil && m.OFloat != nil { + return *m.OFloat + } + return 0 +} + +func (m *Simple) GetODouble() float64 { + if m != nil && m.ODouble != nil { + return *m.ODouble + } + return 0 +} + +func (m *Simple) GetOString() string { + if m != nil && m.OString != nil { + return *m.OString + } + return "" +} + +func (m *Simple) GetOBytes() []byte { + if m != nil { + return m.OBytes + } + return nil +} + +// Test message for holding special non-finites 
primitives. +type NonFinites struct { + FNan *float32 `protobuf:"fixed32,1,opt,name=f_nan,json=fNan" json:"f_nan,omitempty"` + FPinf *float32 `protobuf:"fixed32,2,opt,name=f_pinf,json=fPinf" json:"f_pinf,omitempty"` + FNinf *float32 `protobuf:"fixed32,3,opt,name=f_ninf,json=fNinf" json:"f_ninf,omitempty"` + DNan *float64 `protobuf:"fixed64,4,opt,name=d_nan,json=dNan" json:"d_nan,omitempty"` + DPinf *float64 `protobuf:"fixed64,5,opt,name=d_pinf,json=dPinf" json:"d_pinf,omitempty"` + DNinf *float64 `protobuf:"fixed64,6,opt,name=d_ninf,json=dNinf" json:"d_ninf,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *NonFinites) Reset() { *m = NonFinites{} } +func (m *NonFinites) String() string { return proto.CompactTextString(m) } +func (*NonFinites) ProtoMessage() {} +func (*NonFinites) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} } + +func (m *NonFinites) GetFNan() float32 { + if m != nil && m.FNan != nil { + return *m.FNan + } + return 0 +} + +func (m *NonFinites) GetFPinf() float32 { + if m != nil && m.FPinf != nil { + return *m.FPinf + } + return 0 +} + +func (m *NonFinites) GetFNinf() float32 { + if m != nil && m.FNinf != nil { + return *m.FNinf + } + return 0 +} + +func (m *NonFinites) GetDNan() float64 { + if m != nil && m.DNan != nil { + return *m.DNan + } + return 0 +} + +func (m *NonFinites) GetDPinf() float64 { + if m != nil && m.DPinf != nil { + return *m.DPinf + } + return 0 +} + +func (m *NonFinites) GetDNinf() float64 { + if m != nil && m.DNinf != nil { + return *m.DNinf + } + return 0 +} + +// Test message for holding repeated primitives. +type Repeats struct { + RBool []bool `protobuf:"varint,1,rep,name=r_bool,json=rBool" json:"r_bool,omitempty"` + RInt32 []int32 `protobuf:"varint,2,rep,name=r_int32,json=rInt32" json:"r_int32,omitempty"` + RInt64 []int64 `protobuf:"varint,3,rep,name=r_int64,json=rInt64" json:"r_int64,omitempty"` + RUint32 []uint32 `protobuf:"varint,4,rep,name=r_uint32,json=rUint32" json:"r_uint32,omitempty"` + RUint64 []uint64 `protobuf:"varint,5,rep,name=r_uint64,json=rUint64" json:"r_uint64,omitempty"` + RSint32 []int32 `protobuf:"zigzag32,6,rep,name=r_sint32,json=rSint32" json:"r_sint32,omitempty"` + RSint64 []int64 `protobuf:"zigzag64,7,rep,name=r_sint64,json=rSint64" json:"r_sint64,omitempty"` + RFloat []float32 `protobuf:"fixed32,8,rep,name=r_float,json=rFloat" json:"r_float,omitempty"` + RDouble []float64 `protobuf:"fixed64,9,rep,name=r_double,json=rDouble" json:"r_double,omitempty"` + RString []string `protobuf:"bytes,10,rep,name=r_string,json=rString" json:"r_string,omitempty"` + RBytes [][]byte `protobuf:"bytes,11,rep,name=r_bytes,json=rBytes" json:"r_bytes,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Repeats) Reset() { *m = Repeats{} } +func (m *Repeats) String() string { return proto.CompactTextString(m) } +func (*Repeats) ProtoMessage() {} +func (*Repeats) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} } + +func (m *Repeats) GetRBool() []bool { + if m != nil { + return m.RBool + } + return nil +} + +func (m *Repeats) GetRInt32() []int32 { + if m != nil { + return m.RInt32 + } + return nil +} + +func (m *Repeats) GetRInt64() []int64 { + if m != nil { + return m.RInt64 + } + return nil +} + +func (m *Repeats) GetRUint32() []uint32 { + if m != nil { + return m.RUint32 + } + return nil +} + +func (m *Repeats) GetRUint64() []uint64 { + if m != nil { + return m.RUint64 + } + return nil +} + +func (m *Repeats) GetRSint32() []int32 { + if m != nil { + return m.RSint32 + } + return nil +} + 
+func (m *Repeats) GetRSint64() []int64 { + if m != nil { + return m.RSint64 + } + return nil +} + +func (m *Repeats) GetRFloat() []float32 { + if m != nil { + return m.RFloat + } + return nil +} + +func (m *Repeats) GetRDouble() []float64 { + if m != nil { + return m.RDouble + } + return nil +} + +func (m *Repeats) GetRString() []string { + if m != nil { + return m.RString + } + return nil +} + +func (m *Repeats) GetRBytes() [][]byte { + if m != nil { + return m.RBytes + } + return nil +} + +// Test message for holding enums and nested messages. +type Widget struct { + Color *Widget_Color `protobuf:"varint,1,opt,name=color,enum=jsonpb.Widget_Color" json:"color,omitempty"` + RColor []Widget_Color `protobuf:"varint,2,rep,name=r_color,json=rColor,enum=jsonpb.Widget_Color" json:"r_color,omitempty"` + Simple *Simple `protobuf:"bytes,10,opt,name=simple" json:"simple,omitempty"` + RSimple []*Simple `protobuf:"bytes,11,rep,name=r_simple,json=rSimple" json:"r_simple,omitempty"` + Repeats *Repeats `protobuf:"bytes,20,opt,name=repeats" json:"repeats,omitempty"` + RRepeats []*Repeats `protobuf:"bytes,21,rep,name=r_repeats,json=rRepeats" json:"r_repeats,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Widget) Reset() { *m = Widget{} } +func (m *Widget) String() string { return proto.CompactTextString(m) } +func (*Widget) ProtoMessage() {} +func (*Widget) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{3} } + +func (m *Widget) GetColor() Widget_Color { + if m != nil && m.Color != nil { + return *m.Color + } + return Widget_RED +} + +func (m *Widget) GetRColor() []Widget_Color { + if m != nil { + return m.RColor + } + return nil +} + +func (m *Widget) GetSimple() *Simple { + if m != nil { + return m.Simple + } + return nil +} + +func (m *Widget) GetRSimple() []*Simple { + if m != nil { + return m.RSimple + } + return nil +} + +func (m *Widget) GetRepeats() *Repeats { + if m != nil { + return m.Repeats + } + return nil +} + +func (m *Widget) GetRRepeats() []*Repeats { + if m != nil { + return m.RRepeats + } + return nil +} + +type Maps struct { + MInt64Str map[int64]string `protobuf:"bytes,1,rep,name=m_int64_str,json=mInt64Str" json:"m_int64_str,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MBoolSimple map[bool]*Simple `protobuf:"bytes,2,rep,name=m_bool_simple,json=mBoolSimple" json:"m_bool_simple,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Maps) Reset() { *m = Maps{} } +func (m *Maps) String() string { return proto.CompactTextString(m) } +func (*Maps) ProtoMessage() {} +func (*Maps) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{4} } + +func (m *Maps) GetMInt64Str() map[int64]string { + if m != nil { + return m.MInt64Str + } + return nil +} + +func (m *Maps) GetMBoolSimple() map[bool]*Simple { + if m != nil { + return m.MBoolSimple + } + return nil +} + +type MsgWithOneof struct { + // Types that are valid to be assigned to Union: + // *MsgWithOneof_Title + // *MsgWithOneof_Salary + // *MsgWithOneof_Country + // *MsgWithOneof_HomeAddress + Union isMsgWithOneof_Union `protobuf_oneof:"union"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MsgWithOneof) Reset() { *m = MsgWithOneof{} } +func (m *MsgWithOneof) String() string { return proto.CompactTextString(m) } +func (*MsgWithOneof) ProtoMessage() {} +func (*MsgWithOneof) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{5} } + +type isMsgWithOneof_Union interface 
{ + isMsgWithOneof_Union() +} + +type MsgWithOneof_Title struct { + Title string `protobuf:"bytes,1,opt,name=title,oneof"` +} +type MsgWithOneof_Salary struct { + Salary int64 `protobuf:"varint,2,opt,name=salary,oneof"` +} +type MsgWithOneof_Country struct { + Country string `protobuf:"bytes,3,opt,name=Country,oneof"` +} +type MsgWithOneof_HomeAddress struct { + HomeAddress string `protobuf:"bytes,4,opt,name=home_address,json=homeAddress,oneof"` +} + +func (*MsgWithOneof_Title) isMsgWithOneof_Union() {} +func (*MsgWithOneof_Salary) isMsgWithOneof_Union() {} +func (*MsgWithOneof_Country) isMsgWithOneof_Union() {} +func (*MsgWithOneof_HomeAddress) isMsgWithOneof_Union() {} + +func (m *MsgWithOneof) GetUnion() isMsgWithOneof_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *MsgWithOneof) GetTitle() string { + if x, ok := m.GetUnion().(*MsgWithOneof_Title); ok { + return x.Title + } + return "" +} + +func (m *MsgWithOneof) GetSalary() int64 { + if x, ok := m.GetUnion().(*MsgWithOneof_Salary); ok { + return x.Salary + } + return 0 +} + +func (m *MsgWithOneof) GetCountry() string { + if x, ok := m.GetUnion().(*MsgWithOneof_Country); ok { + return x.Country + } + return "" +} + +func (m *MsgWithOneof) GetHomeAddress() string { + if x, ok := m.GetUnion().(*MsgWithOneof_HomeAddress); ok { + return x.HomeAddress + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*MsgWithOneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MsgWithOneof_OneofMarshaler, _MsgWithOneof_OneofUnmarshaler, _MsgWithOneof_OneofSizer, []interface{}{ + (*MsgWithOneof_Title)(nil), + (*MsgWithOneof_Salary)(nil), + (*MsgWithOneof_Country)(nil), + (*MsgWithOneof_HomeAddress)(nil), + } +} + +func _MsgWithOneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MsgWithOneof) + // union + switch x := m.Union.(type) { + case *MsgWithOneof_Title: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Title) + case *MsgWithOneof_Salary: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Salary)) + case *MsgWithOneof_Country: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Country) + case *MsgWithOneof_HomeAddress: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.HomeAddress) + case nil: + default: + return fmt.Errorf("MsgWithOneof.Union has unexpected type %T", x) + } + return nil +} + +func _MsgWithOneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MsgWithOneof) + switch tag { + case 1: // union.title + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &MsgWithOneof_Title{x} + return true, err + case 2: // union.salary + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &MsgWithOneof_Salary{int64(x)} + return true, err + case 3: // union.Country + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &MsgWithOneof_Country{x} + return true, err + case 4: // union.home_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &MsgWithOneof_HomeAddress{x} + return true, err + default: + return false, nil + } +} + 
+func _MsgWithOneof_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MsgWithOneof) + // union + switch x := m.Union.(type) { + case *MsgWithOneof_Title: + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Title))) + n += len(x.Title) + case *MsgWithOneof_Salary: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Salary)) + case *MsgWithOneof_Country: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Country))) + n += len(x.Country) + case *MsgWithOneof_HomeAddress: + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.HomeAddress))) + n += len(x.HomeAddress) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Real struct { + Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Real) Reset() { *m = Real{} } +func (m *Real) String() string { return proto.CompactTextString(m) } +func (*Real) ProtoMessage() {} +func (*Real) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{6} } + +var extRange_Real = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*Real) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_Real +} + +func (m *Real) GetValue() float64 { + if m != nil && m.Value != nil { + return *m.Value + } + return 0 +} + +type Complex struct { + Imaginary *float64 `protobuf:"fixed64,1,opt,name=imaginary" json:"imaginary,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Complex) Reset() { *m = Complex{} } +func (m *Complex) String() string { return proto.CompactTextString(m) } +func (*Complex) ProtoMessage() {} +func (*Complex) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{7} } + +var extRange_Complex = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*Complex) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_Complex +} + +func (m *Complex) GetImaginary() float64 { + if m != nil && m.Imaginary != nil { + return *m.Imaginary + } + return 0 +} + +var E_Complex_RealExtension = &proto.ExtensionDesc{ + ExtendedType: (*Real)(nil), + ExtensionType: (*Complex)(nil), + Field: 123, + Name: "jsonpb.Complex.real_extension", + Tag: "bytes,123,opt,name=real_extension,json=realExtension", + Filename: "test_objects.proto", +} + +type KnownTypes struct { + An *google_protobuf.Any `protobuf:"bytes,14,opt,name=an" json:"an,omitempty"` + Dur *google_protobuf1.Duration `protobuf:"bytes,1,opt,name=dur" json:"dur,omitempty"` + St *google_protobuf2.Struct `protobuf:"bytes,12,opt,name=st" json:"st,omitempty"` + Ts *google_protobuf3.Timestamp `protobuf:"bytes,2,opt,name=ts" json:"ts,omitempty"` + Lv *google_protobuf2.ListValue `protobuf:"bytes,15,opt,name=lv" json:"lv,omitempty"` + Val *google_protobuf2.Value `protobuf:"bytes,16,opt,name=val" json:"val,omitempty"` + Dbl *google_protobuf4.DoubleValue `protobuf:"bytes,3,opt,name=dbl" json:"dbl,omitempty"` + Flt *google_protobuf4.FloatValue `protobuf:"bytes,4,opt,name=flt" json:"flt,omitempty"` + I64 *google_protobuf4.Int64Value `protobuf:"bytes,5,opt,name=i64" json:"i64,omitempty"` + U64 *google_protobuf4.UInt64Value `protobuf:"bytes,6,opt,name=u64" json:"u64,omitempty"` + I32 *google_protobuf4.Int32Value `protobuf:"bytes,7,opt,name=i32" json:"i32,omitempty"` + U32 *google_protobuf4.UInt32Value `protobuf:"bytes,8,opt,name=u32" 
json:"u32,omitempty"` + Bool *google_protobuf4.BoolValue `protobuf:"bytes,9,opt,name=bool" json:"bool,omitempty"` + Str *google_protobuf4.StringValue `protobuf:"bytes,10,opt,name=str" json:"str,omitempty"` + Bytes *google_protobuf4.BytesValue `protobuf:"bytes,11,opt,name=bytes" json:"bytes,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *KnownTypes) Reset() { *m = KnownTypes{} } +func (m *KnownTypes) String() string { return proto.CompactTextString(m) } +func (*KnownTypes) ProtoMessage() {} +func (*KnownTypes) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{8} } + +func (m *KnownTypes) GetAn() *google_protobuf.Any { + if m != nil { + return m.An + } + return nil +} + +func (m *KnownTypes) GetDur() *google_protobuf1.Duration { + if m != nil { + return m.Dur + } + return nil +} + +func (m *KnownTypes) GetSt() *google_protobuf2.Struct { + if m != nil { + return m.St + } + return nil +} + +func (m *KnownTypes) GetTs() *google_protobuf3.Timestamp { + if m != nil { + return m.Ts + } + return nil +} + +func (m *KnownTypes) GetLv() *google_protobuf2.ListValue { + if m != nil { + return m.Lv + } + return nil +} + +func (m *KnownTypes) GetVal() *google_protobuf2.Value { + if m != nil { + return m.Val + } + return nil +} + +func (m *KnownTypes) GetDbl() *google_protobuf4.DoubleValue { + if m != nil { + return m.Dbl + } + return nil +} + +func (m *KnownTypes) GetFlt() *google_protobuf4.FloatValue { + if m != nil { + return m.Flt + } + return nil +} + +func (m *KnownTypes) GetI64() *google_protobuf4.Int64Value { + if m != nil { + return m.I64 + } + return nil +} + +func (m *KnownTypes) GetU64() *google_protobuf4.UInt64Value { + if m != nil { + return m.U64 + } + return nil +} + +func (m *KnownTypes) GetI32() *google_protobuf4.Int32Value { + if m != nil { + return m.I32 + } + return nil +} + +func (m *KnownTypes) GetU32() *google_protobuf4.UInt32Value { + if m != nil { + return m.U32 + } + return nil +} + +func (m *KnownTypes) GetBool() *google_protobuf4.BoolValue { + if m != nil { + return m.Bool + } + return nil +} + +func (m *KnownTypes) GetStr() *google_protobuf4.StringValue { + if m != nil { + return m.Str + } + return nil +} + +func (m *KnownTypes) GetBytes() *google_protobuf4.BytesValue { + if m != nil { + return m.Bytes + } + return nil +} + +var E_Name = &proto.ExtensionDesc{ + ExtendedType: (*Real)(nil), + ExtensionType: (*string)(nil), + Field: 124, + Name: "jsonpb.name", + Tag: "bytes,124,opt,name=name", + Filename: "test_objects.proto", +} + +func init() { + proto.RegisterType((*Simple)(nil), "jsonpb.Simple") + proto.RegisterType((*NonFinites)(nil), "jsonpb.NonFinites") + proto.RegisterType((*Repeats)(nil), "jsonpb.Repeats") + proto.RegisterType((*Widget)(nil), "jsonpb.Widget") + proto.RegisterType((*Maps)(nil), "jsonpb.Maps") + proto.RegisterType((*MsgWithOneof)(nil), "jsonpb.MsgWithOneof") + proto.RegisterType((*Real)(nil), "jsonpb.Real") + proto.RegisterType((*Complex)(nil), "jsonpb.Complex") + proto.RegisterType((*KnownTypes)(nil), "jsonpb.KnownTypes") + proto.RegisterEnum("jsonpb.Widget_Color", Widget_Color_name, Widget_Color_value) + proto.RegisterExtension(E_Complex_RealExtension) + proto.RegisterExtension(E_Name) +} + +func init() { proto.RegisterFile("test_objects.proto", fileDescriptor1) } + +var fileDescriptor1 = []byte{ + // 1160 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x95, 0x41, 0x73, 0xdb, 0x44, + 0x14, 0xc7, 0x23, 0xc9, 0x92, 0xed, 0x75, 0x92, 0x9a, 0x6d, 0xda, 0x2a, 0x26, 0x80, 
0xc6, 0x94, + 0x22, 0x0a, 0x75, 0x07, 0xc7, 0xe3, 0x61, 0x0a, 0x97, 0xa4, 0x71, 0x29, 0x43, 0x13, 0x98, 0x4d, + 0x43, 0x8f, 0x1e, 0x39, 0x5a, 0xbb, 0x2a, 0xf2, 0xae, 0x67, 0x77, 0x95, 0xd4, 0x03, 0x87, 0x9c, + 0x39, 0x32, 0x7c, 0x05, 0xf8, 0x08, 0x1c, 0xf8, 0x74, 0xcc, 0xdb, 0x95, 0xac, 0xc4, 0x8e, 0x4f, + 0xf1, 0x7b, 0xef, 0xff, 0xfe, 0x59, 0xed, 0x6f, 0x77, 0x1f, 0xc2, 0x8a, 0x4a, 0x35, 0xe4, 0xa3, + 0x77, 0xf4, 0x5c, 0xc9, 0xce, 0x4c, 0x70, 0xc5, 0xb1, 0xf7, 0x4e, 0x72, 0x36, 0x1b, 0xb5, 0x76, + 0x27, 0x9c, 0x4f, 0x52, 0xfa, 0x54, 0x67, 0x47, 0xd9, 0xf8, 0x69, 0xc4, 0xe6, 0x46, 0xd2, 0xfa, + 0x78, 0xb9, 0x14, 0x67, 0x22, 0x52, 0x09, 0x67, 0x79, 0x7d, 0x6f, 0xb9, 0x2e, 0x95, 0xc8, 0xce, + 0x55, 0x5e, 0xfd, 0x64, 0xb9, 0xaa, 0x92, 0x29, 0x95, 0x2a, 0x9a, 0xce, 0xd6, 0xd9, 0x5f, 0x8a, + 0x68, 0x36, 0xa3, 0x22, 0x5f, 0x61, 0xfb, 0x6f, 0x1b, 0x79, 0xa7, 0xc9, 0x74, 0x96, 0x52, 0x7c, + 0x0f, 0x79, 0x7c, 0x38, 0xe2, 0x3c, 0xf5, 0xad, 0xc0, 0x0a, 0x6b, 0xc4, 0xe5, 0x87, 0x9c, 0xa7, + 0xf8, 0x01, 0xaa, 0xf2, 0x61, 0xc2, 0xd4, 0x7e, 0xd7, 0xb7, 0x03, 0x2b, 0x74, 0x89, 0xc7, 0x7f, + 0x80, 0x68, 0x51, 0xe8, 0xf7, 0x7c, 0x27, 0xb0, 0x42, 0xc7, 0x14, 0xfa, 0x3d, 0xbc, 0x8b, 0x6a, + 0x7c, 0x98, 0x99, 0x96, 0x4a, 0x60, 0x85, 0x5b, 0xa4, 0xca, 0xcf, 0x74, 0x58, 0x96, 0xfa, 0x3d, + 0xdf, 0x0d, 0xac, 0xb0, 0x92, 0x97, 0x8a, 0x2e, 0x69, 0xba, 0xbc, 0xc0, 0x0a, 0x3f, 0x20, 0x55, + 0x7e, 0x7a, 0xad, 0x4b, 0x9a, 0xae, 0x6a, 0x60, 0x85, 0x38, 0x2f, 0xf5, 0x7b, 0x66, 0x11, 0xe3, + 0x94, 0x47, 0xca, 0xaf, 0x05, 0x56, 0x68, 0x13, 0x8f, 0xbf, 0x80, 0xc8, 0xf4, 0xc4, 0x3c, 0x1b, + 0xa5, 0xd4, 0xaf, 0x07, 0x56, 0x68, 0x91, 0x2a, 0x3f, 0xd2, 0x61, 0x6e, 0xa7, 0x44, 0xc2, 0x26, + 0x3e, 0x0a, 0xac, 0xb0, 0x0e, 0x76, 0x3a, 0x34, 0x76, 0xa3, 0xb9, 0xa2, 0xd2, 0x6f, 0x04, 0x56, + 0xb8, 0x49, 0x3c, 0x7e, 0x08, 0x51, 0xfb, 0x4f, 0x0b, 0xa1, 0x13, 0xce, 0x5e, 0x24, 0x2c, 0x51, + 0x54, 0xe2, 0xbb, 0xc8, 0x1d, 0x0f, 0x59, 0xc4, 0xf4, 0x56, 0xd9, 0xa4, 0x32, 0x3e, 0x89, 0x18, + 0x6c, 0xe0, 0x78, 0x38, 0x4b, 0xd8, 0x58, 0x6f, 0x94, 0x4d, 0xdc, 0xf1, 0xcf, 0x09, 0x1b, 0x9b, + 0x34, 0x83, 0xb4, 0x93, 0xa7, 0x4f, 0x20, 0x7d, 0x17, 0xb9, 0xb1, 0xb6, 0xa8, 0xe8, 0xd5, 0x55, + 0xe2, 0xdc, 0x22, 0x36, 0x16, 0xae, 0xce, 0xba, 0x71, 0x61, 0x11, 0x1b, 0x0b, 0x2f, 0x4f, 0x83, + 0x45, 0xfb, 0x1f, 0x1b, 0x55, 0x09, 0x9d, 0xd1, 0x48, 0x49, 0x90, 0x88, 0x82, 0x9e, 0x03, 0xf4, + 0x44, 0x41, 0x4f, 0x2c, 0xe8, 0x39, 0x40, 0x4f, 0x2c, 0xe8, 0x89, 0x05, 0x3d, 0x07, 0xe8, 0x89, + 0x05, 0x3d, 0x51, 0xd2, 0x73, 0x80, 0x9e, 0x28, 0xe9, 0x89, 0x92, 0x9e, 0x03, 0xf4, 0x44, 0x49, + 0x4f, 0x94, 0xf4, 0x1c, 0xa0, 0x27, 0x4e, 0xaf, 0x75, 0x2d, 0xe8, 0x39, 0x40, 0x4f, 0x94, 0xf4, + 0xc4, 0x82, 0x9e, 0x03, 0xf4, 0xc4, 0x82, 0x9e, 0x28, 0xe9, 0x39, 0x40, 0x4f, 0x94, 0xf4, 0x44, + 0x49, 0xcf, 0x01, 0x7a, 0xa2, 0xa4, 0x27, 0x16, 0xf4, 0x1c, 0xa0, 0x27, 0x0c, 0xbd, 0x7f, 0x6d, + 0xe4, 0xbd, 0x49, 0xe2, 0x09, 0x55, 0xf8, 0x31, 0x72, 0xcf, 0x79, 0xca, 0x85, 0x26, 0xb7, 0xdd, + 0xdd, 0xe9, 0x98, 0x2b, 0xda, 0x31, 0xe5, 0xce, 0x73, 0xa8, 0x11, 0x23, 0xc1, 0x4f, 0xc0, 0xcf, + 0xa8, 0x61, 0xf3, 0xd6, 0xa9, 0x3d, 0xa1, 0xff, 0xe2, 0x47, 0xc8, 0x93, 0xfa, 0x2a, 0xe9, 0x53, + 0xd5, 0xe8, 0x6e, 0x17, 0x6a, 0x73, 0xc1, 0x48, 0x5e, 0xc5, 0x5f, 0x98, 0x0d, 0xd1, 0x4a, 0x58, + 0xe7, 0xaa, 0x12, 0x36, 0x28, 0x97, 0x56, 0x85, 0x01, 0xec, 0xef, 0x68, 0xcf, 0x3b, 0x85, 0x32, + 0xe7, 0x4e, 0x8a, 0x3a, 0xfe, 0x0a, 0xd5, 0xc5, 0xb0, 0x10, 0xdf, 0xd3, 0xb6, 0x2b, 0xe2, 0x9a, + 0xc8, 0x7f, 0xb5, 0x3f, 0x43, 0xae, 0x59, 0x74, 0x15, 0x39, 0x64, 0x70, 0xd4, 0xdc, 0xc0, 0x75, + 0xe4, 0x7e, 
0x4f, 0x06, 0x83, 0x93, 0xa6, 0x85, 0x6b, 0xa8, 0x72, 0xf8, 0xea, 0x6c, 0xd0, 0xb4, + 0xdb, 0x7f, 0xd9, 0xa8, 0x72, 0x1c, 0xcd, 0x24, 0xfe, 0x16, 0x35, 0xa6, 0xe6, 0xb8, 0xc0, 0xde, + 0xeb, 0x33, 0xd6, 0xe8, 0x7e, 0x58, 0xf8, 0x83, 0xa4, 0x73, 0xac, 0xcf, 0xcf, 0xa9, 0x12, 0x03, + 0xa6, 0xc4, 0x9c, 0xd4, 0xa7, 0x45, 0x8c, 0x0f, 0xd0, 0xd6, 0x54, 0x9f, 0xcd, 0xe2, 0xab, 0x6d, + 0xdd, 0xfe, 0xd1, 0xcd, 0x76, 0x38, 0xaf, 0xe6, 0xb3, 0x8d, 0x41, 0x63, 0x5a, 0x66, 0x5a, 0xdf, + 0xa1, 0xed, 0x9b, 0xfe, 0xb8, 0x89, 0x9c, 0x5f, 0xe9, 0x5c, 0x63, 0x74, 0x08, 0xfc, 0xc4, 0x3b, + 0xc8, 0xbd, 0x88, 0xd2, 0x8c, 0xea, 0xeb, 0x57, 0x27, 0x26, 0x78, 0x66, 0x7f, 0x63, 0xb5, 0x4e, + 0x50, 0x73, 0xd9, 0xfe, 0x7a, 0x7f, 0xcd, 0xf4, 0x3f, 0xbc, 0xde, 0xbf, 0x0a, 0xa5, 0xf4, 0x6b, + 0xff, 0x61, 0xa1, 0xcd, 0x63, 0x39, 0x79, 0x93, 0xa8, 0xb7, 0x3f, 0x31, 0xca, 0xc7, 0xf8, 0x3e, + 0x72, 0x55, 0xa2, 0x52, 0xaa, 0xed, 0xea, 0x2f, 0x37, 0x88, 0x09, 0xb1, 0x8f, 0x3c, 0x19, 0xa5, + 0x91, 0x98, 0x6b, 0x4f, 0xe7, 0xe5, 0x06, 0xc9, 0x63, 0xdc, 0x42, 0xd5, 0xe7, 0x3c, 0x83, 0x95, + 0xe8, 0x67, 0x01, 0x7a, 0x8a, 0x04, 0xfe, 0x14, 0x6d, 0xbe, 0xe5, 0x53, 0x3a, 0x8c, 0xe2, 0x58, + 0x50, 0x29, 0xf5, 0x0b, 0x01, 0x82, 0x06, 0x64, 0x0f, 0x4c, 0xf2, 0xb0, 0x8a, 0xdc, 0x8c, 0x25, + 0x9c, 0xb5, 0x1f, 0xa1, 0x0a, 0xa1, 0x51, 0x5a, 0x7e, 0xbe, 0x65, 0xde, 0x08, 0x1d, 0x3c, 0xae, + 0xd5, 0xe2, 0xe6, 0xd5, 0xd5, 0xd5, 0x95, 0xdd, 0xbe, 0x84, 0xff, 0x08, 0x5f, 0xf2, 0x1e, 0xef, + 0xa1, 0x7a, 0x32, 0x8d, 0x26, 0x09, 0x83, 0x95, 0x19, 0x79, 0x99, 0x28, 0x5b, 0xba, 0x47, 0x68, + 0x5b, 0xd0, 0x28, 0x1d, 0xd2, 0xf7, 0x8a, 0x32, 0x99, 0x70, 0x86, 0x37, 0xcb, 0x23, 0x15, 0xa5, + 0xfe, 0x6f, 0x37, 0xcf, 0x64, 0x6e, 0x4f, 0xb6, 0xa0, 0x69, 0x50, 0xf4, 0xb4, 0xff, 0x73, 0x11, + 0xfa, 0x91, 0xf1, 0x4b, 0xf6, 0x7a, 0x3e, 0xa3, 0x12, 0x3f, 0x44, 0x76, 0xc4, 0xfc, 0x6d, 0xdd, + 0xba, 0xd3, 0x31, 0xf3, 0xa9, 0x53, 0xcc, 0xa7, 0xce, 0x01, 0x9b, 0x13, 0x3b, 0x62, 0xf8, 0x4b, + 0xe4, 0xc4, 0x99, 0xb9, 0xa5, 0x8d, 0xee, 0xee, 0x8a, 0xec, 0x28, 0x9f, 0x92, 0x04, 0x54, 0xf8, + 0x73, 0x64, 0x4b, 0xe5, 0x6f, 0x6a, 0xed, 0x83, 0x15, 0xed, 0xa9, 0x9e, 0x98, 0xc4, 0x96, 0x70, + 0xfb, 0x6d, 0x25, 0x73, 0xbe, 0xad, 0x15, 0xe1, 0xeb, 0x62, 0x78, 0x12, 0x5b, 0x49, 0xd0, 0xa6, + 0x17, 0xfe, 0x9d, 0x35, 0xda, 0x57, 0x89, 0x54, 0xbf, 0xc0, 0x0e, 0x13, 0x3b, 0xbd, 0xc0, 0x21, + 0x72, 0x2e, 0xa2, 0xd4, 0x6f, 0x6a, 0xf1, 0xfd, 0x15, 0xb1, 0x11, 0x82, 0x04, 0x77, 0x90, 0x13, + 0x8f, 0x52, 0xcd, 0xbc, 0xd1, 0xdd, 0x5b, 0xfd, 0x2e, 0xfd, 0xc8, 0xe5, 0xfa, 0x78, 0x94, 0xe2, + 0x27, 0xc8, 0x19, 0xa7, 0x4a, 0x1f, 0x01, 0xb8, 0x70, 0xcb, 0x7a, 0xfd, 0x5c, 0xe6, 0xf2, 0x71, + 0xaa, 0x40, 0x9e, 0xe4, 0xb3, 0xf5, 0x36, 0xb9, 0xbe, 0x42, 0xb9, 0x3c, 0xe9, 0xf7, 0x60, 0x35, + 0x59, 0xbf, 0xa7, 0xa7, 0xca, 0x6d, 0xab, 0x39, 0xbb, 0xae, 0xcf, 0xfa, 0x3d, 0x6d, 0xbf, 0xdf, + 0xd5, 0x43, 0x78, 0x8d, 0xfd, 0x7e, 0xb7, 0xb0, 0xdf, 0xef, 0x6a, 0xfb, 0xfd, 0xae, 0x9e, 0xcc, + 0xeb, 0xec, 0x17, 0xfa, 0x4c, 0xeb, 0x2b, 0x7a, 0x84, 0xd5, 0xd7, 0x6c, 0x3a, 0xdc, 0x61, 0x23, + 0xd7, 0x3a, 0xf0, 0x87, 0xd7, 0x08, 0xad, 0xf1, 0x37, 0x63, 0x21, 0xf7, 0x97, 0x4a, 0xe0, 0xaf, + 0x91, 0x5b, 0x0e, 0xf7, 0xdb, 0x3e, 0x40, 0x8f, 0x0b, 0xd3, 0x60, 0x94, 0xcf, 0x02, 0x54, 0x61, + 0xd1, 0x94, 0x2e, 0x1d, 0xfc, 0xdf, 0xf5, 0x0b, 0xa3, 0x2b, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, + 0xd5, 0x39, 0x32, 0x09, 0xf9, 0x09, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto new file 
mode 100644 index 000000000..0d2fc1fad --- /dev/null +++ b/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto/test_objects.proto @@ -0,0 +1,147 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +package jsonpb; + +// Test message for holding primitive types. +message Simple { + optional bool o_bool = 1; + optional int32 o_int32 = 2; + optional int64 o_int64 = 3; + optional uint32 o_uint32 = 4; + optional uint64 o_uint64 = 5; + optional sint32 o_sint32 = 6; + optional sint64 o_sint64 = 7; + optional float o_float = 8; + optional double o_double = 9; + optional string o_string = 10; + optional bytes o_bytes = 11; +} + +// Test message for holding special non-finites primitives. +message NonFinites { + optional float f_nan = 1; + optional float f_pinf = 2; + optional float f_ninf = 3; + optional double d_nan = 4; + optional double d_pinf = 5; + optional double d_ninf = 6; +} + +// Test message for holding repeated primitives. +message Repeats { + repeated bool r_bool = 1; + repeated int32 r_int32 = 2; + repeated int64 r_int64 = 3; + repeated uint32 r_uint32 = 4; + repeated uint64 r_uint64 = 5; + repeated sint32 r_sint32 = 6; + repeated sint64 r_sint64 = 7; + repeated float r_float = 8; + repeated double r_double = 9; + repeated string r_string = 10; + repeated bytes r_bytes = 11; +} + +// Test message for holding enums and nested messages. 
+message Widget { + enum Color { + RED = 0; + GREEN = 1; + BLUE = 2; + }; + optional Color color = 1; + repeated Color r_color = 2; + + optional Simple simple = 10; + repeated Simple r_simple = 11; + + optional Repeats repeats = 20; + repeated Repeats r_repeats = 21; +} + +message Maps { + map<int64, string> m_int64_str = 1; + map<bool, Simple> m_bool_simple = 2; +} + +message MsgWithOneof { + oneof union { + string title = 1; + int64 salary = 2; + string Country = 3; + string home_address = 4; + } +} + +message Real { + optional double value = 1; + extensions 100 to max; +} + +extend Real { + optional string name = 124; +} + +message Complex { + extend Real { + optional Complex real_extension = 123; + } + optional double imaginary = 1; + extensions 100 to max; +} + +message KnownTypes { + optional google.protobuf.Any an = 14; + optional google.protobuf.Duration dur = 1; + optional google.protobuf.Struct st = 12; + optional google.protobuf.Timestamp ts = 2; + optional google.protobuf.ListValue lv = 15; + optional google.protobuf.Value val = 16; + + optional google.protobuf.DoubleValue dbl = 3; + optional google.protobuf.FloatValue flt = 4; + optional google.protobuf.Int64Value i64 = 5; + optional google.protobuf.UInt64Value u64 = 6; + optional google.protobuf.Int32Value i32 = 7; + optional google.protobuf.UInt32Value u32 = 8; + optional google.protobuf.BoolValue bool = 9; + optional google.protobuf.StringValue str = 10; + optional google.protobuf.BytesValue bytes = 11; +} diff --git a/vendor/github.com/golang/protobuf/proto/Makefile b/vendor/github.com/golang/protobuf/proto/Makefile new file mode 100644 index 000000000..e2e0651a9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/Makefile @@ -0,0 +1,43 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ +install: + go install + +test: install generate-test-pbs + go test + + +generate-test-pbs: + make install + make -C testdata + protoc --go_out=Mtestdata/test.proto=github.com/golang/protobuf/proto/testdata,Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. proto3_proto/proto3.proto + make diff --git a/vendor/github.com/golang/protobuf/proto/all_test.go b/vendor/github.com/golang/protobuf/proto/all_test.go new file mode 100644 index 000000000..41451a407 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/all_test.go @@ -0,0 +1,2278 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "math" + "math/rand" + "reflect" + "runtime/debug" + "strings" + "testing" + "time" + + . "github.com/golang/protobuf/proto" + . "github.com/golang/protobuf/proto/testdata" +) + +var globalO *Buffer + +func old() *Buffer { + if globalO == nil { + globalO = NewBuffer(nil) + } + globalO.Reset() + return globalO +} + +func equalbytes(b1, b2 []byte, t *testing.T) { + if len(b1) != len(b2) { + t.Errorf("wrong lengths: 2*%d != %d", len(b1), len(b2)) + return + } + for i := 0; i < len(b1); i++ { + if b1[i] != b2[i] { + t.Errorf("bad byte[%d]:%x %x: %s %s", i, b1[i], b2[i], b1, b2) + } + } +} + +func initGoTestField() *GoTestField { + f := new(GoTestField) + f.Label = String("label") + f.Type = String("type") + return f +} + +// These are all structurally equivalent but the tag numbers differ. +// (It's remarkable that required, optional, and repeated all have +// 8 letters.) 
+func initGoTest_RequiredGroup() *GoTest_RequiredGroup { + return &GoTest_RequiredGroup{ + RequiredField: String("required"), + } +} + +func initGoTest_OptionalGroup() *GoTest_OptionalGroup { + return &GoTest_OptionalGroup{ + RequiredField: String("optional"), + } +} + +func initGoTest_RepeatedGroup() *GoTest_RepeatedGroup { + return &GoTest_RepeatedGroup{ + RequiredField: String("repeated"), + } +} + +func initGoTest(setdefaults bool) *GoTest { + pb := new(GoTest) + if setdefaults { + pb.F_BoolDefaulted = Bool(Default_GoTest_F_BoolDefaulted) + pb.F_Int32Defaulted = Int32(Default_GoTest_F_Int32Defaulted) + pb.F_Int64Defaulted = Int64(Default_GoTest_F_Int64Defaulted) + pb.F_Fixed32Defaulted = Uint32(Default_GoTest_F_Fixed32Defaulted) + pb.F_Fixed64Defaulted = Uint64(Default_GoTest_F_Fixed64Defaulted) + pb.F_Uint32Defaulted = Uint32(Default_GoTest_F_Uint32Defaulted) + pb.F_Uint64Defaulted = Uint64(Default_GoTest_F_Uint64Defaulted) + pb.F_FloatDefaulted = Float32(Default_GoTest_F_FloatDefaulted) + pb.F_DoubleDefaulted = Float64(Default_GoTest_F_DoubleDefaulted) + pb.F_StringDefaulted = String(Default_GoTest_F_StringDefaulted) + pb.F_BytesDefaulted = Default_GoTest_F_BytesDefaulted + pb.F_Sint32Defaulted = Int32(Default_GoTest_F_Sint32Defaulted) + pb.F_Sint64Defaulted = Int64(Default_GoTest_F_Sint64Defaulted) + } + + pb.Kind = GoTest_TIME.Enum() + pb.RequiredField = initGoTestField() + pb.F_BoolRequired = Bool(true) + pb.F_Int32Required = Int32(3) + pb.F_Int64Required = Int64(6) + pb.F_Fixed32Required = Uint32(32) + pb.F_Fixed64Required = Uint64(64) + pb.F_Uint32Required = Uint32(3232) + pb.F_Uint64Required = Uint64(6464) + pb.F_FloatRequired = Float32(3232) + pb.F_DoubleRequired = Float64(6464) + pb.F_StringRequired = String("string") + pb.F_BytesRequired = []byte("bytes") + pb.F_Sint32Required = Int32(-32) + pb.F_Sint64Required = Int64(-64) + pb.Requiredgroup = initGoTest_RequiredGroup() + + return pb +} + +func fail(msg string, b *bytes.Buffer, s string, t *testing.T) { + data := b.Bytes() + ld := len(data) + ls := len(s) / 2 + + fmt.Printf("fail %s ld=%d ls=%d\n", msg, ld, ls) + + // find the interesting spot - n + n := ls + if ld < ls { + n = ld + } + j := 0 + for i := 0; i < n; i++ { + bs := hex(s[j])*16 + hex(s[j+1]) + j += 2 + if data[i] == bs { + continue + } + n = i + break + } + l := n - 10 + if l < 0 { + l = 0 + } + h := n + 10 + + // find the interesting spot - n + fmt.Printf("is[%d]:", l) + for i := l; i < h; i++ { + if i >= ld { + fmt.Printf(" --") + continue + } + fmt.Printf(" %.2x", data[i]) + } + fmt.Printf("\n") + + fmt.Printf("sb[%d]:", l) + for i := l; i < h; i++ { + if i >= ls { + fmt.Printf(" --") + continue + } + bs := hex(s[j])*16 + hex(s[j+1]) + j += 2 + fmt.Printf(" %.2x", bs) + } + fmt.Printf("\n") + + t.Fail() + + // t.Errorf("%s: \ngood: %s\nbad: %x", msg, s, b.Bytes()) + // Print the output in a partially-decoded format; can + // be helpful when updating the test. It produces the output + // that is pasted, with minor edits, into the argument to verify(). 
+ // data := b.Bytes() + // nesting := 0 + // for b.Len() > 0 { + // start := len(data) - b.Len() + // var u uint64 + // u, err := DecodeVarint(b) + // if err != nil { + // fmt.Printf("decode error on varint:", err) + // return + // } + // wire := u & 0x7 + // tag := u >> 3 + // switch wire { + // case WireVarint: + // v, err := DecodeVarint(b) + // if err != nil { + // fmt.Printf("decode error on varint:", err) + // return + // } + // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", + // data[start:len(data)-b.Len()], tag, wire, v) + // case WireFixed32: + // v, err := DecodeFixed32(b) + // if err != nil { + // fmt.Printf("decode error on fixed32:", err) + // return + // } + // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", + // data[start:len(data)-b.Len()], tag, wire, v) + // case WireFixed64: + // v, err := DecodeFixed64(b) + // if err != nil { + // fmt.Printf("decode error on fixed64:", err) + // return + // } + // fmt.Printf("\t\t\"%x\" // field %d, encoding %d, value %d\n", + // data[start:len(data)-b.Len()], tag, wire, v) + // case WireBytes: + // nb, err := DecodeVarint(b) + // if err != nil { + // fmt.Printf("decode error on bytes:", err) + // return + // } + // after_tag := len(data) - b.Len() + // str := make([]byte, nb) + // _, err = b.Read(str) + // if err != nil { + // fmt.Printf("decode error on bytes:", err) + // return + // } + // fmt.Printf("\t\t\"%x\" \"%x\" // field %d, encoding %d (FIELD)\n", + // data[start:after_tag], str, tag, wire) + // case WireStartGroup: + // nesting++ + // fmt.Printf("\t\t\"%x\"\t\t// start group field %d level %d\n", + // data[start:len(data)-b.Len()], tag, nesting) + // case WireEndGroup: + // fmt.Printf("\t\t\"%x\"\t\t// end group field %d level %d\n", + // data[start:len(data)-b.Len()], tag, nesting) + // nesting-- + // default: + // fmt.Printf("unrecognized wire type %d\n", wire) + // return + // } + // } +} + +func hex(c uint8) uint8 { + if '0' <= c && c <= '9' { + return c - '0' + } + if 'a' <= c && c <= 'f' { + return 10 + c - 'a' + } + if 'A' <= c && c <= 'F' { + return 10 + c - 'A' + } + return 0 +} + +func equal(b []byte, s string, t *testing.T) bool { + if 2*len(b) != len(s) { + // fail(fmt.Sprintf("wrong lengths: 2*%d != %d", len(b), len(s)), b, s, t) + fmt.Printf("wrong lengths: 2*%d != %d\n", len(b), len(s)) + return false + } + for i, j := 0, 0; i < len(b); i, j = i+1, j+2 { + x := hex(s[j])*16 + hex(s[j+1]) + if b[i] != x { + // fail(fmt.Sprintf("bad byte[%d]:%x %x", i, b[i], x), b, s, t) + fmt.Printf("bad byte[%d]:%x %x", i, b[i], x) + return false + } + } + return true +} + +func overify(t *testing.T, pb *GoTest, expected string) { + o := old() + err := o.Marshal(pb) + if err != nil { + fmt.Printf("overify marshal-1 err = %v", err) + o.DebugPrint("", o.Bytes()) + t.Fatalf("expected = %s", expected) + } + if !equal(o.Bytes(), expected, t) { + o.DebugPrint("overify neq 1", o.Bytes()) + t.Fatalf("expected = %s", expected) + } + + // Now test Unmarshal by recreating the original buffer. 
+ pbd := new(GoTest) + err = o.Unmarshal(pbd) + if err != nil { + t.Fatalf("overify unmarshal err = %v", err) + o.DebugPrint("", o.Bytes()) + t.Fatalf("string = %s", expected) + } + o.Reset() + err = o.Marshal(pbd) + if err != nil { + t.Errorf("overify marshal-2 err = %v", err) + o.DebugPrint("", o.Bytes()) + t.Fatalf("string = %s", expected) + } + if !equal(o.Bytes(), expected, t) { + o.DebugPrint("overify neq 2", o.Bytes()) + t.Fatalf("string = %s", expected) + } +} + +// Simple tests for numeric encode/decode primitives (varint, etc.) +func TestNumericPrimitives(t *testing.T) { + for i := uint64(0); i < 1e6; i += 111 { + o := old() + if o.EncodeVarint(i) != nil { + t.Error("EncodeVarint") + break + } + x, e := o.DecodeVarint() + if e != nil { + t.Fatal("DecodeVarint") + } + if x != i { + t.Fatal("varint decode fail:", i, x) + } + + o = old() + if o.EncodeFixed32(i) != nil { + t.Fatal("encFixed32") + } + x, e = o.DecodeFixed32() + if e != nil { + t.Fatal("decFixed32") + } + if x != i { + t.Fatal("fixed32 decode fail:", i, x) + } + + o = old() + if o.EncodeFixed64(i*1234567) != nil { + t.Error("encFixed64") + break + } + x, e = o.DecodeFixed64() + if e != nil { + t.Error("decFixed64") + break + } + if x != i*1234567 { + t.Error("fixed64 decode fail:", i*1234567, x) + break + } + + o = old() + i32 := int32(i - 12345) + if o.EncodeZigzag32(uint64(i32)) != nil { + t.Fatal("EncodeZigzag32") + } + x, e = o.DecodeZigzag32() + if e != nil { + t.Fatal("DecodeZigzag32") + } + if x != uint64(uint32(i32)) { + t.Fatal("zigzag32 decode fail:", i32, x) + } + + o = old() + i64 := int64(i - 12345) + if o.EncodeZigzag64(uint64(i64)) != nil { + t.Fatal("EncodeZigzag64") + } + x, e = o.DecodeZigzag64() + if e != nil { + t.Fatal("DecodeZigzag64") + } + if x != uint64(i64) { + t.Fatal("zigzag64 decode fail:", i64, x) + } + } +} + +// fakeMarshaler is a simple struct implementing Marshaler and Message interfaces. +type fakeMarshaler struct { + b []byte + err error +} + +func (f *fakeMarshaler) Marshal() ([]byte, error) { return f.b, f.err } +func (f *fakeMarshaler) String() string { return fmt.Sprintf("Bytes: %v Error: %v", f.b, f.err) } +func (f *fakeMarshaler) ProtoMessage() {} +func (f *fakeMarshaler) Reset() {} + +type msgWithFakeMarshaler struct { + M *fakeMarshaler `protobuf:"bytes,1,opt,name=fake"` +} + +func (m *msgWithFakeMarshaler) String() string { return CompactTextString(m) } +func (m *msgWithFakeMarshaler) ProtoMessage() {} +func (m *msgWithFakeMarshaler) Reset() {} + +// Simple tests for proto messages that implement the Marshaler interface. +func TestMarshalerEncoding(t *testing.T) { + tests := []struct { + name string + m Message + want []byte + errType reflect.Type + }{ + { + name: "Marshaler that fails", + m: &fakeMarshaler{ + err: errors.New("some marshal err"), + b: []byte{5, 6, 7}, + }, + // Since the Marshal method returned bytes, they should be written to the + // buffer. (For efficiency, we assume that Marshal implementations are + // always correct w.r.t. RequiredNotSetError and output.) + want: []byte{5, 6, 7}, + errType: reflect.TypeOf(errors.New("some marshal err")), + }, + { + name: "Marshaler that fails with RequiredNotSetError", + m: &msgWithFakeMarshaler{ + M: &fakeMarshaler{ + err: &RequiredNotSetError{}, + b: []byte{5, 6, 7}, + }, + }, + // Since there's an error that can be continued after, + // the buffer should be written. 
+ want: []byte{ + 10, 3, // for &msgWithFakeMarshaler + 5, 6, 7, // for &fakeMarshaler + }, + errType: reflect.TypeOf(&RequiredNotSetError{}), + }, + { + name: "Marshaler that succeeds", + m: &fakeMarshaler{ + b: []byte{0, 1, 2, 3, 4, 127, 255}, + }, + want: []byte{0, 1, 2, 3, 4, 127, 255}, + }, + } + for _, test := range tests { + b := NewBuffer(nil) + err := b.Marshal(test.m) + if reflect.TypeOf(err) != test.errType { + t.Errorf("%s: got err %T(%v) wanted %T", test.name, err, err, test.errType) + } + if !reflect.DeepEqual(test.want, b.Bytes()) { + t.Errorf("%s: got bytes %v wanted %v", test.name, b.Bytes(), test.want) + } + if size := Size(test.m); size != len(b.Bytes()) { + t.Errorf("%s: Size(_) = %v, but marshaled to %v bytes", test.name, size, len(b.Bytes())) + } + + m, mErr := Marshal(test.m) + if !bytes.Equal(b.Bytes(), m) { + t.Errorf("%s: Marshal returned %v, but (*Buffer).Marshal wrote %v", test.name, m, b.Bytes()) + } + if !reflect.DeepEqual(err, mErr) { + t.Errorf("%s: Marshal err = %q, but (*Buffer).Marshal returned %q", + test.name, fmt.Sprint(mErr), fmt.Sprint(err)) + } + } +} + +// Simple tests for bytes +func TestBytesPrimitives(t *testing.T) { + o := old() + bytes := []byte{'n', 'o', 'w', ' ', 'i', 's', ' ', 't', 'h', 'e', ' ', 't', 'i', 'm', 'e'} + if o.EncodeRawBytes(bytes) != nil { + t.Error("EncodeRawBytes") + } + decb, e := o.DecodeRawBytes(false) + if e != nil { + t.Error("DecodeRawBytes") + } + equalbytes(bytes, decb, t) +} + +// Simple tests for strings +func TestStringPrimitives(t *testing.T) { + o := old() + s := "now is the time" + if o.EncodeStringBytes(s) != nil { + t.Error("enc_string") + } + decs, e := o.DecodeStringBytes() + if e != nil { + t.Error("dec_string") + } + if s != decs { + t.Error("string encode/decode fail:", s, decs) + } +} + +// Do we catch the "required bit not set" case? +func TestRequiredBit(t *testing.T) { + o := old() + pb := new(GoTest) + err := o.Marshal(pb) + if err == nil { + t.Error("did not catch missing required fields") + } else if strings.Index(err.Error(), "Kind") < 0 { + t.Error("wrong error type:", err) + } +} + +// Check that all fields are nil. +// Clearly silly, and a residue from a more interesting test with an earlier, +// different initialization property, but it once caught a compiler bug so +// it lives. 
+func checkInitialized(pb *GoTest, t *testing.T) { + if pb.F_BoolDefaulted != nil { + t.Error("New or Reset did not set boolean:", *pb.F_BoolDefaulted) + } + if pb.F_Int32Defaulted != nil { + t.Error("New or Reset did not set int32:", *pb.F_Int32Defaulted) + } + if pb.F_Int64Defaulted != nil { + t.Error("New or Reset did not set int64:", *pb.F_Int64Defaulted) + } + if pb.F_Fixed32Defaulted != nil { + t.Error("New or Reset did not set fixed32:", *pb.F_Fixed32Defaulted) + } + if pb.F_Fixed64Defaulted != nil { + t.Error("New or Reset did not set fixed64:", *pb.F_Fixed64Defaulted) + } + if pb.F_Uint32Defaulted != nil { + t.Error("New or Reset did not set uint32:", *pb.F_Uint32Defaulted) + } + if pb.F_Uint64Defaulted != nil { + t.Error("New or Reset did not set uint64:", *pb.F_Uint64Defaulted) + } + if pb.F_FloatDefaulted != nil { + t.Error("New or Reset did not set float:", *pb.F_FloatDefaulted) + } + if pb.F_DoubleDefaulted != nil { + t.Error("New or Reset did not set double:", *pb.F_DoubleDefaulted) + } + if pb.F_StringDefaulted != nil { + t.Error("New or Reset did not set string:", *pb.F_StringDefaulted) + } + if pb.F_BytesDefaulted != nil { + t.Error("New or Reset did not set bytes:", string(pb.F_BytesDefaulted)) + } + if pb.F_Sint32Defaulted != nil { + t.Error("New or Reset did not set int32:", *pb.F_Sint32Defaulted) + } + if pb.F_Sint64Defaulted != nil { + t.Error("New or Reset did not set int64:", *pb.F_Sint64Defaulted) + } +} + +// Does Reset() reset? +func TestReset(t *testing.T) { + pb := initGoTest(true) + // muck with some values + pb.F_BoolDefaulted = Bool(false) + pb.F_Int32Defaulted = Int32(237) + pb.F_Int64Defaulted = Int64(12346) + pb.F_Fixed32Defaulted = Uint32(32000) + pb.F_Fixed64Defaulted = Uint64(666) + pb.F_Uint32Defaulted = Uint32(323232) + pb.F_Uint64Defaulted = nil + pb.F_FloatDefaulted = nil + pb.F_DoubleDefaulted = Float64(0) + pb.F_StringDefaulted = String("gotcha") + pb.F_BytesDefaulted = []byte("asdfasdf") + pb.F_Sint32Defaulted = Int32(123) + pb.F_Sint64Defaulted = Int64(789) + pb.Reset() + checkInitialized(pb, t) +} + +// All required fields set, no defaults provided. +func TestEncodeDecode1(t *testing.T) { + pb := initGoTest(false) + overify(t, pb, + "0807"+ // field 1, encoding 0, value 7 + "220d"+"0a056c6162656c120474797065"+ // field 4, encoding 2 (GoTestField) + "5001"+ // field 10, encoding 0, value 1 + "5803"+ // field 11, encoding 0, value 3 + "6006"+ // field 12, encoding 0, value 6 + "6d20000000"+ // field 13, encoding 5, value 0x20 + "714000000000000000"+ // field 14, encoding 1, value 0x40 + "78a019"+ // field 15, encoding 0, value 0xca0 = 3232 + "8001c032"+ // field 16, encoding 0, value 0x1940 = 6464 + "8d0100004a45"+ // field 17, encoding 5, value 3232.0 + "9101000000000040b940"+ // field 18, encoding 1, value 6464.0 + "9a0106"+"737472696e67"+ // field 19, encoding 2, string "string" + "b304"+ // field 70, encoding 3, start group + "ba0408"+"7265717569726564"+ // field 71, encoding 2, string "required" + "b404"+ // field 70, encoding 4, end group + "aa0605"+"6279746573"+ // field 101, encoding 2, string "bytes" + "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 + "b8067f") // field 103, encoding 0, 0x7f zigzag64 +} + +// All required fields set, defaults provided. 
+func TestEncodeDecode2(t *testing.T) { + pb := initGoTest(true) + overify(t, pb, + "0807"+ // field 1, encoding 0, value 7 + "220d"+"0a056c6162656c120474797065"+ // field 4, encoding 2 (GoTestField) + "5001"+ // field 10, encoding 0, value 1 + "5803"+ // field 11, encoding 0, value 3 + "6006"+ // field 12, encoding 0, value 6 + "6d20000000"+ // field 13, encoding 5, value 32 + "714000000000000000"+ // field 14, encoding 1, value 64 + "78a019"+ // field 15, encoding 0, value 3232 + "8001c032"+ // field 16, encoding 0, value 6464 + "8d0100004a45"+ // field 17, encoding 5, value 3232.0 + "9101000000000040b940"+ // field 18, encoding 1, value 6464.0 + "9a0106"+"737472696e67"+ // field 19, encoding 2 string "string" + "c00201"+ // field 40, encoding 0, value 1 + "c80220"+ // field 41, encoding 0, value 32 + "d00240"+ // field 42, encoding 0, value 64 + "dd0240010000"+ // field 43, encoding 5, value 320 + "e1028002000000000000"+ // field 44, encoding 1, value 640 + "e8028019"+ // field 45, encoding 0, value 3200 + "f0028032"+ // field 46, encoding 0, value 6400 + "fd02e0659948"+ // field 47, encoding 5, value 314159.0 + "81030000000050971041"+ // field 48, encoding 1, value 271828.0 + "8a0310"+"68656c6c6f2c2022776f726c6421220a"+ // field 49, encoding 2 string "hello, \"world!\"\n" + "b304"+ // start group field 70 level 1 + "ba0408"+"7265717569726564"+ // field 71, encoding 2, string "required" + "b404"+ // end group field 70 level 1 + "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" + "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" + "90193f"+ // field 402, encoding 0, value 63 + "98197f") // field 403, encoding 0, value 127 + +} + +// All default fields set to their default value by hand +func TestEncodeDecode3(t *testing.T) { + pb := initGoTest(false) + pb.F_BoolDefaulted = Bool(true) + pb.F_Int32Defaulted = Int32(32) + pb.F_Int64Defaulted = Int64(64) + pb.F_Fixed32Defaulted = Uint32(320) + pb.F_Fixed64Defaulted = Uint64(640) + pb.F_Uint32Defaulted = Uint32(3200) + pb.F_Uint64Defaulted = Uint64(6400) + pb.F_FloatDefaulted = Float32(314159) + pb.F_DoubleDefaulted = Float64(271828) + pb.F_StringDefaulted = String("hello, \"world!\"\n") + pb.F_BytesDefaulted = []byte("Bignose") + pb.F_Sint32Defaulted = Int32(-32) + pb.F_Sint64Defaulted = Int64(-64) + + overify(t, pb, + "0807"+ // field 1, encoding 0, value 7 + "220d"+"0a056c6162656c120474797065"+ // field 4, encoding 2 (GoTestField) + "5001"+ // field 10, encoding 0, value 1 + "5803"+ // field 11, encoding 0, value 3 + "6006"+ // field 12, encoding 0, value 6 + "6d20000000"+ // field 13, encoding 5, value 32 + "714000000000000000"+ // field 14, encoding 1, value 64 + "78a019"+ // field 15, encoding 0, value 3232 + "8001c032"+ // field 16, encoding 0, value 6464 + "8d0100004a45"+ // field 17, encoding 5, value 3232.0 + "9101000000000040b940"+ // field 18, encoding 1, value 6464.0 + "9a0106"+"737472696e67"+ // field 19, encoding 2 string "string" + "c00201"+ // field 40, encoding 0, value 1 + "c80220"+ // field 41, encoding 0, value 32 + "d00240"+ // field 42, encoding 0, value 64 + "dd0240010000"+ // field 43, encoding 5, value 320 + "e1028002000000000000"+ // field 44, encoding 1, value 640 + "e8028019"+ // field 45, encoding 0, value 3200 + "f0028032"+ // field 46, encoding 0, value 6400 + "fd02e0659948"+ // field 47, encoding 5, value 314159.0 + "81030000000050971041"+ // field 48, encoding 1, value 
271828.0 + "8a0310"+"68656c6c6f2c2022776f726c6421220a"+ // field 49, encoding 2 string "hello, \"world!\"\n" + "b304"+ // start group field 70 level 1 + "ba0408"+"7265717569726564"+ // field 71, encoding 2, string "required" + "b404"+ // end group field 70 level 1 + "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" + "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" + "90193f"+ // field 402, encoding 0, value 63 + "98197f") // field 403, encoding 0, value 127 + +} + +// All required fields set, defaults provided, all non-defaulted optional fields have values. +func TestEncodeDecode4(t *testing.T) { + pb := initGoTest(true) + pb.Table = String("hello") + pb.Param = Int32(7) + pb.OptionalField = initGoTestField() + pb.F_BoolOptional = Bool(true) + pb.F_Int32Optional = Int32(32) + pb.F_Int64Optional = Int64(64) + pb.F_Fixed32Optional = Uint32(3232) + pb.F_Fixed64Optional = Uint64(6464) + pb.F_Uint32Optional = Uint32(323232) + pb.F_Uint64Optional = Uint64(646464) + pb.F_FloatOptional = Float32(32.) + pb.F_DoubleOptional = Float64(64.) + pb.F_StringOptional = String("hello") + pb.F_BytesOptional = []byte("Bignose") + pb.F_Sint32Optional = Int32(-32) + pb.F_Sint64Optional = Int64(-64) + pb.Optionalgroup = initGoTest_OptionalGroup() + + overify(t, pb, + "0807"+ // field 1, encoding 0, value 7 + "1205"+"68656c6c6f"+ // field 2, encoding 2, string "hello" + "1807"+ // field 3, encoding 0, value 7 + "220d"+"0a056c6162656c120474797065"+ // field 4, encoding 2 (GoTestField) + "320d"+"0a056c6162656c120474797065"+ // field 6, encoding 2 (GoTestField) + "5001"+ // field 10, encoding 0, value 1 + "5803"+ // field 11, encoding 0, value 3 + "6006"+ // field 12, encoding 0, value 6 + "6d20000000"+ // field 13, encoding 5, value 32 + "714000000000000000"+ // field 14, encoding 1, value 64 + "78a019"+ // field 15, encoding 0, value 3232 + "8001c032"+ // field 16, encoding 0, value 6464 + "8d0100004a45"+ // field 17, encoding 5, value 3232.0 + "9101000000000040b940"+ // field 18, encoding 1, value 6464.0 + "9a0106"+"737472696e67"+ // field 19, encoding 2 string "string" + "f00101"+ // field 30, encoding 0, value 1 + "f80120"+ // field 31, encoding 0, value 32 + "800240"+ // field 32, encoding 0, value 64 + "8d02a00c0000"+ // field 33, encoding 5, value 3232 + "91024019000000000000"+ // field 34, encoding 1, value 6464 + "9802a0dd13"+ // field 35, encoding 0, value 323232 + "a002c0ba27"+ // field 36, encoding 0, value 646464 + "ad0200000042"+ // field 37, encoding 5, value 32.0 + "b1020000000000005040"+ // field 38, encoding 1, value 64.0 + "ba0205"+"68656c6c6f"+ // field 39, encoding 2, string "hello" + "c00201"+ // field 40, encoding 0, value 1 + "c80220"+ // field 41, encoding 0, value 32 + "d00240"+ // field 42, encoding 0, value 64 + "dd0240010000"+ // field 43, encoding 5, value 320 + "e1028002000000000000"+ // field 44, encoding 1, value 640 + "e8028019"+ // field 45, encoding 0, value 3200 + "f0028032"+ // field 46, encoding 0, value 6400 + "fd02e0659948"+ // field 47, encoding 5, value 314159.0 + "81030000000050971041"+ // field 48, encoding 1, value 271828.0 + "8a0310"+"68656c6c6f2c2022776f726c6421220a"+ // field 49, encoding 2 string "hello, \"world!\"\n" + "b304"+ // start group field 70 level 1 + "ba0408"+"7265717569726564"+ // field 71, encoding 2, string "required" + "b404"+ // end group field 70 level 1 + "d305"+ // start group field 90 level 1 + 
"da0508"+"6f7074696f6e616c"+ // field 91, encoding 2, string "optional" + "d405"+ // end group field 90 level 1 + "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" + "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "ea1207"+"4269676e6f7365"+ // field 301, encoding 2, string "Bignose" + "f0123f"+ // field 302, encoding 0, value 63 + "f8127f"+ // field 303, encoding 0, value 127 + "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" + "90193f"+ // field 402, encoding 0, value 63 + "98197f") // field 403, encoding 0, value 127 + +} + +// All required fields set, defaults provided, all repeated fields given two values. +func TestEncodeDecode5(t *testing.T) { + pb := initGoTest(true) + pb.RepeatedField = []*GoTestField{initGoTestField(), initGoTestField()} + pb.F_BoolRepeated = []bool{false, true} + pb.F_Int32Repeated = []int32{32, 33} + pb.F_Int64Repeated = []int64{64, 65} + pb.F_Fixed32Repeated = []uint32{3232, 3333} + pb.F_Fixed64Repeated = []uint64{6464, 6565} + pb.F_Uint32Repeated = []uint32{323232, 333333} + pb.F_Uint64Repeated = []uint64{646464, 656565} + pb.F_FloatRepeated = []float32{32., 33.} + pb.F_DoubleRepeated = []float64{64., 65.} + pb.F_StringRepeated = []string{"hello", "sailor"} + pb.F_BytesRepeated = [][]byte{[]byte("big"), []byte("nose")} + pb.F_Sint32Repeated = []int32{32, -32} + pb.F_Sint64Repeated = []int64{64, -64} + pb.Repeatedgroup = []*GoTest_RepeatedGroup{initGoTest_RepeatedGroup(), initGoTest_RepeatedGroup()} + + overify(t, pb, + "0807"+ // field 1, encoding 0, value 7 + "220d"+"0a056c6162656c120474797065"+ // field 4, encoding 2 (GoTestField) + "2a0d"+"0a056c6162656c120474797065"+ // field 5, encoding 2 (GoTestField) + "2a0d"+"0a056c6162656c120474797065"+ // field 5, encoding 2 (GoTestField) + "5001"+ // field 10, encoding 0, value 1 + "5803"+ // field 11, encoding 0, value 3 + "6006"+ // field 12, encoding 0, value 6 + "6d20000000"+ // field 13, encoding 5, value 32 + "714000000000000000"+ // field 14, encoding 1, value 64 + "78a019"+ // field 15, encoding 0, value 3232 + "8001c032"+ // field 16, encoding 0, value 6464 + "8d0100004a45"+ // field 17, encoding 5, value 3232.0 + "9101000000000040b940"+ // field 18, encoding 1, value 6464.0 + "9a0106"+"737472696e67"+ // field 19, encoding 2 string "string" + "a00100"+ // field 20, encoding 0, value 0 + "a00101"+ // field 20, encoding 0, value 1 + "a80120"+ // field 21, encoding 0, value 32 + "a80121"+ // field 21, encoding 0, value 33 + "b00140"+ // field 22, encoding 0, value 64 + "b00141"+ // field 22, encoding 0, value 65 + "bd01a00c0000"+ // field 23, encoding 5, value 3232 + "bd01050d0000"+ // field 23, encoding 5, value 3333 + "c1014019000000000000"+ // field 24, encoding 1, value 6464 + "c101a519000000000000"+ // field 24, encoding 1, value 6565 + "c801a0dd13"+ // field 25, encoding 0, value 323232 + "c80195ac14"+ // field 25, encoding 0, value 333333 + "d001c0ba27"+ // field 26, encoding 0, value 646464 + "d001b58928"+ // field 26, encoding 0, value 656565 + "dd0100000042"+ // field 27, encoding 5, value 32.0 + "dd0100000442"+ // field 27, encoding 5, value 33.0 + "e1010000000000005040"+ // field 28, encoding 1, value 64.0 + "e1010000000000405040"+ // field 28, encoding 1, value 65.0 + "ea0105"+"68656c6c6f"+ // field 29, encoding 2, string "hello" + "ea0106"+"7361696c6f72"+ // field 29, encoding 2, string "sailor" + "c00201"+ // field 40, encoding 0, value 1 + "c80220"+ // field 41, encoding 0, value 32 + "d00240"+ // field 
42, encoding 0, value 64 + "dd0240010000"+ // field 43, encoding 5, value 320 + "e1028002000000000000"+ // field 44, encoding 1, value 640 + "e8028019"+ // field 45, encoding 0, value 3200 + "f0028032"+ // field 46, encoding 0, value 6400 + "fd02e0659948"+ // field 47, encoding 5, value 314159.0 + "81030000000050971041"+ // field 48, encoding 1, value 271828.0 + "8a0310"+"68656c6c6f2c2022776f726c6421220a"+ // field 49, encoding 2 string "hello, \"world!\"\n" + "b304"+ // start group field 70 level 1 + "ba0408"+"7265717569726564"+ // field 71, encoding 2, string "required" + "b404"+ // end group field 70 level 1 + "8305"+ // start group field 80 level 1 + "8a0508"+"7265706561746564"+ // field 81, encoding 2, string "repeated" + "8405"+ // end group field 80 level 1 + "8305"+ // start group field 80 level 1 + "8a0508"+"7265706561746564"+ // field 81, encoding 2, string "repeated" + "8405"+ // end group field 80 level 1 + "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" + "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "ca0c03"+"626967"+ // field 201, encoding 2, string "big" + "ca0c04"+"6e6f7365"+ // field 201, encoding 2, string "nose" + "d00c40"+ // field 202, encoding 0, value 32 + "d00c3f"+ // field 202, encoding 0, value -32 + "d80c8001"+ // field 203, encoding 0, value 64 + "d80c7f"+ // field 203, encoding 0, value -64 + "8a1907"+"4269676e6f7365"+ // field 401, encoding 2, string "Bignose" + "90193f"+ // field 402, encoding 0, value 63 + "98197f") // field 403, encoding 0, value 127 + +} + +// All required fields set, all packed repeated fields given two values. +func TestEncodeDecode6(t *testing.T) { + pb := initGoTest(false) + pb.F_BoolRepeatedPacked = []bool{false, true} + pb.F_Int32RepeatedPacked = []int32{32, 33} + pb.F_Int64RepeatedPacked = []int64{64, 65} + pb.F_Fixed32RepeatedPacked = []uint32{3232, 3333} + pb.F_Fixed64RepeatedPacked = []uint64{6464, 6565} + pb.F_Uint32RepeatedPacked = []uint32{323232, 333333} + pb.F_Uint64RepeatedPacked = []uint64{646464, 656565} + pb.F_FloatRepeatedPacked = []float32{32., 33.} + pb.F_DoubleRepeatedPacked = []float64{64., 65.} + pb.F_Sint32RepeatedPacked = []int32{32, -32} + pb.F_Sint64RepeatedPacked = []int64{64, -64} + + overify(t, pb, + "0807"+ // field 1, encoding 0, value 7 + "220d"+"0a056c6162656c120474797065"+ // field 4, encoding 2 (GoTestField) + "5001"+ // field 10, encoding 0, value 1 + "5803"+ // field 11, encoding 0, value 3 + "6006"+ // field 12, encoding 0, value 6 + "6d20000000"+ // field 13, encoding 5, value 32 + "714000000000000000"+ // field 14, encoding 1, value 64 + "78a019"+ // field 15, encoding 0, value 3232 + "8001c032"+ // field 16, encoding 0, value 6464 + "8d0100004a45"+ // field 17, encoding 5, value 3232.0 + "9101000000000040b940"+ // field 18, encoding 1, value 6464.0 + "9a0106"+"737472696e67"+ // field 19, encoding 2 string "string" + "9203020001"+ // field 50, encoding 2, 2 bytes, value 0, value 1 + "9a03022021"+ // field 51, encoding 2, 2 bytes, value 32, value 33 + "a203024041"+ // field 52, encoding 2, 2 bytes, value 64, value 65 + "aa0308"+ // field 53, encoding 2, 8 bytes + "a00c0000050d0000"+ // value 3232, value 3333 + "b20310"+ // field 54, encoding 2, 16 bytes + "4019000000000000a519000000000000"+ // value 6464, value 6565 + "ba0306"+ // field 55, encoding 2, 6 bytes + "a0dd1395ac14"+ // value 323232, value 333333 + "c20306"+ // field 56, encoding 2, 6 bytes + "c0ba27b58928"+ // value 646464, value 656565 + "ca0308"+ // field 57, 
encoding 2, 8 bytes + "0000004200000442"+ // value 32.0, value 33.0 + "d20310"+ // field 58, encoding 2, 16 bytes + "00000000000050400000000000405040"+ // value 64.0, value 65.0 + "b304"+ // start group field 70 level 1 + "ba0408"+"7265717569726564"+ // field 71, encoding 2, string "required" + "b404"+ // end group field 70 level 1 + "aa0605"+"6279746573"+ // field 101, encoding 2 string "bytes" + "b0063f"+ // field 102, encoding 0, 0x3f zigzag32 + "b8067f"+ // field 103, encoding 0, 0x7f zigzag64 + "b21f02"+ // field 502, encoding 2, 2 bytes + "403f"+ // value 32, value -32 + "ba1f03"+ // field 503, encoding 2, 3 bytes + "80017f") // value 64, value -64 +} + +// Test that we can encode empty bytes fields. +func TestEncodeDecodeBytes1(t *testing.T) { + pb := initGoTest(false) + + // Create our bytes + pb.F_BytesRequired = []byte{} + pb.F_BytesRepeated = [][]byte{{}} + pb.F_BytesOptional = []byte{} + + d, err := Marshal(pb) + if err != nil { + t.Error(err) + } + + pbd := new(GoTest) + if err := Unmarshal(d, pbd); err != nil { + t.Error(err) + } + + if pbd.F_BytesRequired == nil || len(pbd.F_BytesRequired) != 0 { + t.Error("required empty bytes field is incorrect") + } + if pbd.F_BytesRepeated == nil || len(pbd.F_BytesRepeated) == 1 && pbd.F_BytesRepeated[0] == nil { + t.Error("repeated empty bytes field is incorrect") + } + if pbd.F_BytesOptional == nil || len(pbd.F_BytesOptional) != 0 { + t.Error("optional empty bytes field is incorrect") + } +} + +// Test that we encode nil-valued fields of a repeated bytes field correctly. +// Since entries in a repeated field cannot be nil, nil must mean empty value. +func TestEncodeDecodeBytes2(t *testing.T) { + pb := initGoTest(false) + + // Create our bytes + pb.F_BytesRepeated = [][]byte{nil} + + d, err := Marshal(pb) + if err != nil { + t.Error(err) + } + + pbd := new(GoTest) + if err := Unmarshal(d, pbd); err != nil { + t.Error(err) + } + + if len(pbd.F_BytesRepeated) != 1 || pbd.F_BytesRepeated[0] == nil { + t.Error("Unexpected value for repeated bytes field") + } +} + +// All required fields set, defaults provided, all repeated fields given two values. +func TestSkippingUnrecognizedFields(t *testing.T) { + o := old() + pb := initGoTestField() + + // Marshal it normally. + o.Marshal(pb) + + // Now new a GoSkipTest record. + skip := &GoSkipTest{ + SkipInt32: Int32(32), + SkipFixed32: Uint32(3232), + SkipFixed64: Uint64(6464), + SkipString: String("skipper"), + Skipgroup: &GoSkipTest_SkipGroup{ + GroupInt32: Int32(75), + GroupString: String("wxyz"), + }, + } + + // Marshal it into same buffer. + o.Marshal(skip) + + pbd := new(GoTestField) + o.Unmarshal(pbd) + + // The __unrecognized field should be a marshaling of GoSkipTest + skipd := new(GoSkipTest) + + o.SetBuf(pbd.XXX_unrecognized) + o.Unmarshal(skipd) + + if *skipd.SkipInt32 != *skip.SkipInt32 { + t.Error("skip int32", skipd.SkipInt32) + } + if *skipd.SkipFixed32 != *skip.SkipFixed32 { + t.Error("skip fixed32", skipd.SkipFixed32) + } + if *skipd.SkipFixed64 != *skip.SkipFixed64 { + t.Error("skip fixed64", skipd.SkipFixed64) + } + if *skipd.SkipString != *skip.SkipString { + t.Error("skip string", *skipd.SkipString) + } + if *skipd.Skipgroup.GroupInt32 != *skip.Skipgroup.GroupInt32 { + t.Error("skip group int32", skipd.Skipgroup.GroupInt32) + } + if *skipd.Skipgroup.GroupString != *skip.Skipgroup.GroupString { + t.Error("skip group string", *skipd.Skipgroup.GroupString) + } +} + +// Check that unrecognized fields of a submessage are preserved. 
+func TestSubmessageUnrecognizedFields(t *testing.T) { + nm := &NewMessage{ + Nested: &NewMessage_Nested{ + Name: String("Nigel"), + FoodGroup: String("carbs"), + }, + } + b, err := Marshal(nm) + if err != nil { + t.Fatalf("Marshal of NewMessage: %v", err) + } + + // Unmarshal into an OldMessage. + om := new(OldMessage) + if err := Unmarshal(b, om); err != nil { + t.Fatalf("Unmarshal to OldMessage: %v", err) + } + exp := &OldMessage{ + Nested: &OldMessage_Nested{ + Name: String("Nigel"), + // normal protocol buffer users should not do this + XXX_unrecognized: []byte("\x12\x05carbs"), + }, + } + if !Equal(om, exp) { + t.Errorf("om = %v, want %v", om, exp) + } + + // Clone the OldMessage. + om = Clone(om).(*OldMessage) + if !Equal(om, exp) { + t.Errorf("Clone(om) = %v, want %v", om, exp) + } + + // Marshal the OldMessage, then unmarshal it into an empty NewMessage. + if b, err = Marshal(om); err != nil { + t.Fatalf("Marshal of OldMessage: %v", err) + } + t.Logf("Marshal(%v) -> %q", om, b) + nm2 := new(NewMessage) + if err := Unmarshal(b, nm2); err != nil { + t.Fatalf("Unmarshal to NewMessage: %v", err) + } + if !Equal(nm, nm2) { + t.Errorf("NewMessage round-trip: %v => %v", nm, nm2) + } +} + +// Check that an int32 field can be upgraded to an int64 field. +func TestNegativeInt32(t *testing.T) { + om := &OldMessage{ + Num: Int32(-1), + } + b, err := Marshal(om) + if err != nil { + t.Fatalf("Marshal of OldMessage: %v", err) + } + + // Check the size. It should be 11 bytes; + // 1 for the field/wire type, and 10 for the negative number. + if len(b) != 11 { + t.Errorf("%v marshaled as %q, wanted 11 bytes", om, b) + } + + // Unmarshal into a NewMessage. + nm := new(NewMessage) + if err := Unmarshal(b, nm); err != nil { + t.Fatalf("Unmarshal to NewMessage: %v", err) + } + want := &NewMessage{ + Num: Int64(-1), + } + if !Equal(nm, want) { + t.Errorf("nm = %v, want %v", nm, want) + } +} + +// Check that we can grow an array (repeated field) to have many elements. +// This test doesn't depend only on our encoding; for variety, it makes sure +// we create, encode, and decode the correct contents explicitly. It's therefore +// a bit messier. +// This test also uses (and hence tests) the Marshal/Unmarshal functions +// instead of the methods. +func TestBigRepeated(t *testing.T) { + pb := initGoTest(true) + + // Create the arrays + const N = 50 // Internally the library starts much smaller. + pb.Repeatedgroup = make([]*GoTest_RepeatedGroup, N) + pb.F_Sint64Repeated = make([]int64, N) + pb.F_Sint32Repeated = make([]int32, N) + pb.F_BytesRepeated = make([][]byte, N) + pb.F_StringRepeated = make([]string, N) + pb.F_DoubleRepeated = make([]float64, N) + pb.F_FloatRepeated = make([]float32, N) + pb.F_Uint64Repeated = make([]uint64, N) + pb.F_Uint32Repeated = make([]uint32, N) + pb.F_Fixed64Repeated = make([]uint64, N) + pb.F_Fixed32Repeated = make([]uint32, N) + pb.F_Int64Repeated = make([]int64, N) + pb.F_Int32Repeated = make([]int32, N) + pb.F_BoolRepeated = make([]bool, N) + pb.RepeatedField = make([]*GoTestField, N) + + // Fill in the arrays with checkable values. 
+ igtf := initGoTestField() + igtrg := initGoTest_RepeatedGroup() + for i := 0; i < N; i++ { + pb.Repeatedgroup[i] = igtrg + pb.F_Sint64Repeated[i] = int64(i) + pb.F_Sint32Repeated[i] = int32(i) + s := fmt.Sprint(i) + pb.F_BytesRepeated[i] = []byte(s) + pb.F_StringRepeated[i] = s + pb.F_DoubleRepeated[i] = float64(i) + pb.F_FloatRepeated[i] = float32(i) + pb.F_Uint64Repeated[i] = uint64(i) + pb.F_Uint32Repeated[i] = uint32(i) + pb.F_Fixed64Repeated[i] = uint64(i) + pb.F_Fixed32Repeated[i] = uint32(i) + pb.F_Int64Repeated[i] = int64(i) + pb.F_Int32Repeated[i] = int32(i) + pb.F_BoolRepeated[i] = i%2 == 0 + pb.RepeatedField[i] = igtf + } + + // Marshal. + buf, _ := Marshal(pb) + + // Now test Unmarshal by recreating the original buffer. + pbd := new(GoTest) + Unmarshal(buf, pbd) + + // Check the checkable values + for i := uint64(0); i < N; i++ { + if pbd.Repeatedgroup[i] == nil { // TODO: more checking? + t.Error("pbd.Repeatedgroup bad") + } + var x uint64 + x = uint64(pbd.F_Sint64Repeated[i]) + if x != i { + t.Error("pbd.F_Sint64Repeated bad", x, i) + } + x = uint64(pbd.F_Sint32Repeated[i]) + if x != i { + t.Error("pbd.F_Sint32Repeated bad", x, i) + } + s := fmt.Sprint(i) + equalbytes(pbd.F_BytesRepeated[i], []byte(s), t) + if pbd.F_StringRepeated[i] != s { + t.Error("pbd.F_Sint32Repeated bad", pbd.F_StringRepeated[i], i) + } + x = uint64(pbd.F_DoubleRepeated[i]) + if x != i { + t.Error("pbd.F_DoubleRepeated bad", x, i) + } + x = uint64(pbd.F_FloatRepeated[i]) + if x != i { + t.Error("pbd.F_FloatRepeated bad", x, i) + } + x = pbd.F_Uint64Repeated[i] + if x != i { + t.Error("pbd.F_Uint64Repeated bad", x, i) + } + x = uint64(pbd.F_Uint32Repeated[i]) + if x != i { + t.Error("pbd.F_Uint32Repeated bad", x, i) + } + x = pbd.F_Fixed64Repeated[i] + if x != i { + t.Error("pbd.F_Fixed64Repeated bad", x, i) + } + x = uint64(pbd.F_Fixed32Repeated[i]) + if x != i { + t.Error("pbd.F_Fixed32Repeated bad", x, i) + } + x = uint64(pbd.F_Int64Repeated[i]) + if x != i { + t.Error("pbd.F_Int64Repeated bad", x, i) + } + x = uint64(pbd.F_Int32Repeated[i]) + if x != i { + t.Error("pbd.F_Int32Repeated bad", x, i) + } + if pbd.F_BoolRepeated[i] != (i%2 == 0) { + t.Error("pbd.F_BoolRepeated bad", x, i) + } + if pbd.RepeatedField[i] == nil { // TODO: more checking? + t.Error("pbd.RepeatedField bad") + } + } +} + +// Verify we give a useful message when decoding to the wrong structure type. +func TestTypeMismatch(t *testing.T) { + pb1 := initGoTest(true) + + // Marshal + o := old() + o.Marshal(pb1) + + // Now Unmarshal it to the wrong type. 
+ pb2 := initGoTestField() + err := o.Unmarshal(pb2) + if err == nil { + t.Error("expected error, got no error") + } else if !strings.Contains(err.Error(), "bad wiretype") { + t.Error("expected bad wiretype error, got", err) + } +} + +func encodeDecode(t *testing.T, in, out Message, msg string) { + buf, err := Marshal(in) + if err != nil { + t.Fatalf("failed marshaling %v: %v", msg, err) + } + if err := Unmarshal(buf, out); err != nil { + t.Fatalf("failed unmarshaling %v: %v", msg, err) + } +} + +func TestPackedNonPackedDecoderSwitching(t *testing.T) { + np, p := new(NonPackedTest), new(PackedTest) + + // non-packed -> packed + np.A = []int32{0, 1, 1, 2, 3, 5} + encodeDecode(t, np, p, "non-packed -> packed") + if !reflect.DeepEqual(np.A, p.B) { + t.Errorf("failed non-packed -> packed; np.A=%+v, p.B=%+v", np.A, p.B) + } + + // packed -> non-packed + np.Reset() + p.B = []int32{3, 1, 4, 1, 5, 9} + encodeDecode(t, p, np, "packed -> non-packed") + if !reflect.DeepEqual(p.B, np.A) { + t.Errorf("failed packed -> non-packed; p.B=%+v, np.A=%+v", p.B, np.A) + } +} + +func TestProto1RepeatedGroup(t *testing.T) { + pb := &MessageList{ + Message: []*MessageList_Message{ + { + Name: String("blah"), + Count: Int32(7), + }, + // NOTE: pb.Message[1] is a nil + nil, + }, + } + + o := old() + err := o.Marshal(pb) + if err == nil || !strings.Contains(err.Error(), "repeated field Message has nil") { + t.Fatalf("unexpected or no error when marshaling: %v", err) + } +} + +// Test that enums work. Checks for a bug introduced by making enums +// named types instead of int32: newInt32FromUint64 would crash with +// a type mismatch in reflect.PointTo. +func TestEnum(t *testing.T) { + pb := new(GoEnum) + pb.Foo = FOO_FOO1.Enum() + o := old() + if err := o.Marshal(pb); err != nil { + t.Fatal("error encoding enum:", err) + } + pb1 := new(GoEnum) + if err := o.Unmarshal(pb1); err != nil { + t.Fatal("error decoding enum:", err) + } + if *pb1.Foo != FOO_FOO1 { + t.Error("expected 7 but got ", *pb1.Foo) + } +} + +// Enum types have String methods. Check that enum fields can be printed. +// We don't care what the value actually is, just as long as it doesn't crash. +func TestPrintingNilEnumFields(t *testing.T) { + pb := new(GoEnum) + _ = fmt.Sprintf("%+v", pb) +} + +// Verify that absent required fields cause Marshal/Unmarshal to return errors. +func TestRequiredFieldEnforcement(t *testing.T) { + pb := new(GoTestField) + _, err := Marshal(pb) + if err == nil { + t.Error("marshal: expected error, got nil") + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Label") { + t.Errorf("marshal: bad error type: %v", err) + } + + // A slightly sneaky, yet valid, proto. It encodes the same required field twice, + // so simply counting the required fields is insufficient. + // field 1, encoding 2, value "hi" + buf := []byte("\x0A\x02hi\x0A\x02hi") + err = Unmarshal(buf, pb) + if err == nil { + t.Error("unmarshal: expected error, got nil") + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "{Unknown}") { + t.Errorf("unmarshal: bad error type: %v", err) + } +} + +// Verify that absent required fields in groups cause Marshal/Unmarshal to return errors. 
+func TestRequiredFieldEnforcementGroups(t *testing.T) { + pb := &GoTestRequiredGroupField{Group: &GoTestRequiredGroupField_Group{}} + if _, err := Marshal(pb); err == nil { + t.Error("marshal: expected error, got nil") + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Group.Field") { + t.Errorf("marshal: bad error type: %v", err) + } + + buf := []byte{11, 12} + if err := Unmarshal(buf, pb); err == nil { + t.Error("unmarshal: expected error, got nil") + } else if _, ok := err.(*RequiredNotSetError); !ok || !strings.Contains(err.Error(), "Group.{Unknown}") { + t.Errorf("unmarshal: bad error type: %v", err) + } +} + +func TestTypedNilMarshal(t *testing.T) { + // A typed nil should return ErrNil and not crash. + { + var m *GoEnum + if _, err := Marshal(m); err != ErrNil { + t.Errorf("Marshal(%#v): got %v, want ErrNil", m, err) + } + } + + { + m := &Communique{Union: &Communique_Msg{nil}} + if _, err := Marshal(m); err == nil || err == ErrNil { + t.Errorf("Marshal(%#v): got %v, want errOneofHasNil", m, err) + } + } +} + +// A type that implements the Marshaler interface, but is not nillable. +type nonNillableInt uint64 + +func (nni nonNillableInt) Marshal() ([]byte, error) { + return EncodeVarint(uint64(nni)), nil +} + +type NNIMessage struct { + nni nonNillableInt +} + +func (*NNIMessage) Reset() {} +func (*NNIMessage) String() string { return "" } +func (*NNIMessage) ProtoMessage() {} + +// A type that implements the Marshaler interface and is nillable. +type nillableMessage struct { + x uint64 +} + +func (nm *nillableMessage) Marshal() ([]byte, error) { + return EncodeVarint(nm.x), nil +} + +type NMMessage struct { + nm *nillableMessage +} + +func (*NMMessage) Reset() {} +func (*NMMessage) String() string { return "" } +func (*NMMessage) ProtoMessage() {} + +// Verify a type that uses the Marshaler interface, but has a nil pointer. +func TestNilMarshaler(t *testing.T) { + // Try a struct with a Marshaler field that is nil. + // It should be directly marshable. + nmm := new(NMMessage) + if _, err := Marshal(nmm); err != nil { + t.Error("unexpected error marshaling nmm: ", err) + } + + // Try a struct with a Marshaler field that is not nillable. + nnim := new(NNIMessage) + nnim.nni = 7 + var _ Marshaler = nnim.nni // verify it is truly a Marshaler + if _, err := Marshal(nnim); err != nil { + t.Error("unexpected error marshaling nnim: ", err) + } +} + +func TestAllSetDefaults(t *testing.T) { + // Exercise SetDefaults with all scalar field types. + m := &Defaults{ + // NaN != NaN, so override that here. + F_Nan: Float32(1.7), + } + expected := &Defaults{ + F_Bool: Bool(true), + F_Int32: Int32(32), + F_Int64: Int64(64), + F_Fixed32: Uint32(320), + F_Fixed64: Uint64(640), + F_Uint32: Uint32(3200), + F_Uint64: Uint64(6400), + F_Float: Float32(314159), + F_Double: Float64(271828), + F_String: String(`hello, "world!"` + "\n"), + F_Bytes: []byte("Bignose"), + F_Sint32: Int32(-32), + F_Sint64: Int64(-64), + F_Enum: Defaults_GREEN.Enum(), + F_Pinf: Float32(float32(math.Inf(1))), + F_Ninf: Float32(float32(math.Inf(-1))), + F_Nan: Float32(1.7), + StrZero: String(""), + } + SetDefaults(m) + if !Equal(m, expected) { + t.Errorf("SetDefaults failed\n got %v\nwant %v", m, expected) + } +} + +func TestSetDefaultsWithSetField(t *testing.T) { + // Check that a set value is not overridden. 
+ m := &Defaults{ + F_Int32: Int32(12), + } + SetDefaults(m) + if v := m.GetF_Int32(); v != 12 { + t.Errorf("m.FInt32 = %v, want 12", v) + } +} + +func TestSetDefaultsWithSubMessage(t *testing.T) { + m := &OtherMessage{ + Key: Int64(123), + Inner: &InnerMessage{ + Host: String("gopher"), + }, + } + expected := &OtherMessage{ + Key: Int64(123), + Inner: &InnerMessage{ + Host: String("gopher"), + Port: Int32(4000), + }, + } + SetDefaults(m) + if !Equal(m, expected) { + t.Errorf("\n got %v\nwant %v", m, expected) + } +} + +func TestSetDefaultsWithRepeatedSubMessage(t *testing.T) { + m := &MyMessage{ + RepInner: []*InnerMessage{{}}, + } + expected := &MyMessage{ + RepInner: []*InnerMessage{{ + Port: Int32(4000), + }}, + } + SetDefaults(m) + if !Equal(m, expected) { + t.Errorf("\n got %v\nwant %v", m, expected) + } +} + +func TestSetDefaultWithRepeatedNonMessage(t *testing.T) { + m := &MyMessage{ + Pet: []string{"turtle", "wombat"}, + } + expected := Clone(m) + SetDefaults(m) + if !Equal(m, expected) { + t.Errorf("\n got %v\nwant %v", m, expected) + } +} + +func TestMaximumTagNumber(t *testing.T) { + m := &MaxTag{ + LastField: String("natural goat essence"), + } + buf, err := Marshal(m) + if err != nil { + t.Fatalf("proto.Marshal failed: %v", err) + } + m2 := new(MaxTag) + if err := Unmarshal(buf, m2); err != nil { + t.Fatalf("proto.Unmarshal failed: %v", err) + } + if got, want := m2.GetLastField(), *m.LastField; got != want { + t.Errorf("got %q, want %q", got, want) + } +} + +func TestJSON(t *testing.T) { + m := &MyMessage{ + Count: Int32(4), + Pet: []string{"bunny", "kitty"}, + Inner: &InnerMessage{ + Host: String("cauchy"), + }, + Bikeshed: MyMessage_GREEN.Enum(), + } + const expected = `{"count":4,"pet":["bunny","kitty"],"inner":{"host":"cauchy"},"bikeshed":1}` + + b, err := json.Marshal(m) + if err != nil { + t.Fatalf("json.Marshal failed: %v", err) + } + s := string(b) + if s != expected { + t.Errorf("got %s\nwant %s", s, expected) + } + + received := new(MyMessage) + if err := json.Unmarshal(b, received); err != nil { + t.Fatalf("json.Unmarshal failed: %v", err) + } + if !Equal(received, m) { + t.Fatalf("got %s, want %s", received, m) + } + + // Test unmarshalling of JSON with symbolic enum name. + const old = `{"count":4,"pet":["bunny","kitty"],"inner":{"host":"cauchy"},"bikeshed":"GREEN"}` + received.Reset() + if err := json.Unmarshal([]byte(old), received); err != nil { + t.Fatalf("json.Unmarshal failed: %v", err) + } + if !Equal(received, m) { + t.Fatalf("got %s, want %s", received, m) + } +} + +func TestBadWireType(t *testing.T) { + b := []byte{7<<3 | 6} // field 7, wire type 6 + pb := new(OtherMessage) + if err := Unmarshal(b, pb); err == nil { + t.Errorf("Unmarshal did not fail") + } else if !strings.Contains(err.Error(), "unknown wire type") { + t.Errorf("wrong error: %v", err) + } +} + +func TestBytesWithInvalidLength(t *testing.T) { + // If a byte sequence has an invalid (negative) length, Unmarshal should not panic. + b := []byte{2<<3 | WireBytes, 0xff, 0xff, 0xff, 0xff, 0xff, 0} + Unmarshal(b, new(MyMessage)) +} + +func TestLengthOverflow(t *testing.T) { + // Overflowing a length should not panic. + b := []byte{2<<3 | WireBytes, 1, 1, 3<<3 | WireBytes, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x01} + Unmarshal(b, new(MyMessage)) +} + +func TestVarintOverflow(t *testing.T) { + // Overflowing a 64-bit length should not be allowed. 
+ b := []byte{1<<3 | WireVarint, 0x01, 3<<3 | WireBytes, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01} + if err := Unmarshal(b, new(MyMessage)); err == nil { + t.Fatalf("Overflowed uint64 length without error") + } +} + +func TestUnmarshalFuzz(t *testing.T) { + const N = 1000 + seed := time.Now().UnixNano() + t.Logf("RNG seed is %d", seed) + rng := rand.New(rand.NewSource(seed)) + buf := make([]byte, 20) + for i := 0; i < N; i++ { + for j := range buf { + buf[j] = byte(rng.Intn(256)) + } + fuzzUnmarshal(t, buf) + } +} + +func TestMergeMessages(t *testing.T) { + pb := &MessageList{Message: []*MessageList_Message{{Name: String("x"), Count: Int32(1)}}} + data, err := Marshal(pb) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + + pb1 := new(MessageList) + if err := Unmarshal(data, pb1); err != nil { + t.Fatalf("first Unmarshal: %v", err) + } + if err := Unmarshal(data, pb1); err != nil { + t.Fatalf("second Unmarshal: %v", err) + } + if len(pb1.Message) != 1 { + t.Errorf("two Unmarshals produced %d Messages, want 1", len(pb1.Message)) + } + + pb2 := new(MessageList) + if err := UnmarshalMerge(data, pb2); err != nil { + t.Fatalf("first UnmarshalMerge: %v", err) + } + if err := UnmarshalMerge(data, pb2); err != nil { + t.Fatalf("second UnmarshalMerge: %v", err) + } + if len(pb2.Message) != 2 { + t.Errorf("two UnmarshalMerges produced %d Messages, want 2", len(pb2.Message)) + } +} + +func TestExtensionMarshalOrder(t *testing.T) { + m := &MyMessage{Count: Int(123)} + if err := SetExtension(m, E_Ext_More, &Ext{Data: String("alpha")}); err != nil { + t.Fatalf("SetExtension: %v", err) + } + if err := SetExtension(m, E_Ext_Text, String("aleph")); err != nil { + t.Fatalf("SetExtension: %v", err) + } + if err := SetExtension(m, E_Ext_Number, Int32(1)); err != nil { + t.Fatalf("SetExtension: %v", err) + } + + // Serialize m several times, and check we get the same bytes each time. + var orig []byte + for i := 0; i < 100; i++ { + b, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + if i == 0 { + orig = b + continue + } + if !bytes.Equal(b, orig) { + t.Errorf("Bytes differ on attempt #%d", i) + } + } +} + +// Many extensions, because small maps might not iterate differently on each iteration. +var exts = []*ExtensionDesc{ + E_X201, + E_X202, + E_X203, + E_X204, + E_X205, + E_X206, + E_X207, + E_X208, + E_X209, + E_X210, + E_X211, + E_X212, + E_X213, + E_X214, + E_X215, + E_X216, + E_X217, + E_X218, + E_X219, + E_X220, + E_X221, + E_X222, + E_X223, + E_X224, + E_X225, + E_X226, + E_X227, + E_X228, + E_X229, + E_X230, + E_X231, + E_X232, + E_X233, + E_X234, + E_X235, + E_X236, + E_X237, + E_X238, + E_X239, + E_X240, + E_X241, + E_X242, + E_X243, + E_X244, + E_X245, + E_X246, + E_X247, + E_X248, + E_X249, + E_X250, +} + +func TestMessageSetMarshalOrder(t *testing.T) { + m := &MyMessageSet{} + for _, x := range exts { + if err := SetExtension(m, x, &Empty{}); err != nil { + t.Fatalf("SetExtension: %v", err) + } + } + + buf, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + + // Serialize m several times, and check we get the same bytes each time. 
+ for i := 0; i < 10; i++ { + b1, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + if !bytes.Equal(b1, buf) { + t.Errorf("Bytes differ on re-Marshal #%d", i) + } + + m2 := &MyMessageSet{} + if err := Unmarshal(buf, m2); err != nil { + t.Errorf("Unmarshal: %v", err) + } + b2, err := Marshal(m2) + if err != nil { + t.Errorf("re-Marshal: %v", err) + } + if !bytes.Equal(b2, buf) { + t.Errorf("Bytes differ on round-trip #%d", i) + } + } +} + +func TestUnmarshalMergesMessages(t *testing.T) { + // If a nested message occurs twice in the input, + // the fields should be merged when decoding. + a := &OtherMessage{ + Key: Int64(123), + Inner: &InnerMessage{ + Host: String("polhode"), + Port: Int32(1234), + }, + } + aData, err := Marshal(a) + if err != nil { + t.Fatalf("Marshal(a): %v", err) + } + b := &OtherMessage{ + Weight: Float32(1.2), + Inner: &InnerMessage{ + Host: String("herpolhode"), + Connected: Bool(true), + }, + } + bData, err := Marshal(b) + if err != nil { + t.Fatalf("Marshal(b): %v", err) + } + want := &OtherMessage{ + Key: Int64(123), + Weight: Float32(1.2), + Inner: &InnerMessage{ + Host: String("herpolhode"), + Port: Int32(1234), + Connected: Bool(true), + }, + } + got := new(OtherMessage) + if err := Unmarshal(append(aData, bData...), got); err != nil { + t.Fatalf("Unmarshal: %v", err) + } + if !Equal(got, want) { + t.Errorf("\n got %v\nwant %v", got, want) + } +} + +func TestEncodingSizes(t *testing.T) { + tests := []struct { + m Message + n int + }{ + {&Defaults{F_Int32: Int32(math.MaxInt32)}, 6}, + {&Defaults{F_Int32: Int32(math.MinInt32)}, 11}, + {&Defaults{F_Uint32: Uint32(uint32(math.MaxInt32) + 1)}, 6}, + {&Defaults{F_Uint32: Uint32(math.MaxUint32)}, 6}, + } + for _, test := range tests { + b, err := Marshal(test.m) + if err != nil { + t.Errorf("Marshal(%v): %v", test.m, err) + continue + } + if len(b) != test.n { + t.Errorf("Marshal(%v) yielded %d bytes, want %d bytes", test.m, len(b), test.n) + } + } +} + +func TestRequiredNotSetError(t *testing.T) { + pb := initGoTest(false) + pb.RequiredField.Label = nil + pb.F_Int32Required = nil + pb.F_Int64Required = nil + + expected := "0807" + // field 1, encoding 0, value 7 + "2206" + "120474797065" + // field 4, encoding 2 (GoTestField) + "5001" + // field 10, encoding 0, value 1 + "6d20000000" + // field 13, encoding 5, value 0x20 + "714000000000000000" + // field 14, encoding 1, value 0x40 + "78a019" + // field 15, encoding 0, value 0xca0 = 3232 + "8001c032" + // field 16, encoding 0, value 0x1940 = 6464 + "8d0100004a45" + // field 17, encoding 5, value 3232.0 + "9101000000000040b940" + // field 18, encoding 1, value 6464.0 + "9a0106" + "737472696e67" + // field 19, encoding 2, string "string" + "b304" + // field 70, encoding 3, start group + "ba0408" + "7265717569726564" + // field 71, encoding 2, string "required" + "b404" + // field 70, encoding 4, end group + "aa0605" + "6279746573" + // field 101, encoding 2, string "bytes" + "b0063f" + // field 102, encoding 0, 0x3f zigzag32 + "b8067f" // field 103, encoding 0, 0x7f zigzag64 + + o := old() + bytes, err := Marshal(pb) + if _, ok := err.(*RequiredNotSetError); !ok { + fmt.Printf("marshal-1 err = %v, want *RequiredNotSetError", err) + o.DebugPrint("", bytes) + t.Fatalf("expected = %s", expected) + } + if strings.Index(err.Error(), "RequiredField.Label") < 0 { + t.Errorf("marshal-1 wrong err msg: %v", err) + } + if !equal(bytes, expected, t) { + o.DebugPrint("neq 1", bytes) + t.Fatalf("expected = %s", expected) + } + + // Now test Unmarshal by 
recreating the original buffer. + pbd := new(GoTest) + err = Unmarshal(bytes, pbd) + if _, ok := err.(*RequiredNotSetError); !ok { + t.Fatalf("unmarshal err = %v, want *RequiredNotSetError", err) + o.DebugPrint("", bytes) + t.Fatalf("string = %s", expected) + } + if strings.Index(err.Error(), "RequiredField.{Unknown}") < 0 { + t.Errorf("unmarshal wrong err msg: %v", err) + } + bytes, err = Marshal(pbd) + if _, ok := err.(*RequiredNotSetError); !ok { + t.Errorf("marshal-2 err = %v, want *RequiredNotSetError", err) + o.DebugPrint("", bytes) + t.Fatalf("string = %s", expected) + } + if strings.Index(err.Error(), "RequiredField.Label") < 0 { + t.Errorf("marshal-2 wrong err msg: %v", err) + } + if !equal(bytes, expected, t) { + o.DebugPrint("neq 2", bytes) + t.Fatalf("string = %s", expected) + } +} + +func fuzzUnmarshal(t *testing.T, data []byte) { + defer func() { + if e := recover(); e != nil { + t.Errorf("These bytes caused a panic: %+v", data) + t.Logf("Stack:\n%s", debug.Stack()) + t.FailNow() + } + }() + + pb := new(MyMessage) + Unmarshal(data, pb) +} + +func TestMapFieldMarshal(t *testing.T) { + m := &MessageWithMap{ + NameMapping: map[int32]string{ + 1: "Rob", + 4: "Ian", + 8: "Dave", + }, + } + b, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + + // b should be the concatenation of these three byte sequences in some order. + parts := []string{ + "\n\a\b\x01\x12\x03Rob", + "\n\a\b\x04\x12\x03Ian", + "\n\b\b\x08\x12\x04Dave", + } + ok := false + for i := range parts { + for j := range parts { + if j == i { + continue + } + for k := range parts { + if k == i || k == j { + continue + } + try := parts[i] + parts[j] + parts[k] + if bytes.Equal(b, []byte(try)) { + ok = true + break + } + } + } + } + if !ok { + t.Fatalf("Incorrect Marshal output.\n got %q\nwant %q (or a permutation of that)", b, parts[0]+parts[1]+parts[2]) + } + t.Logf("FYI b: %q", b) + + (new(Buffer)).DebugPrint("Dump of b", b) +} + +func TestMapFieldRoundTrips(t *testing.T) { + m := &MessageWithMap{ + NameMapping: map[int32]string{ + 1: "Rob", + 4: "Ian", + 8: "Dave", + }, + MsgMapping: map[int64]*FloatingPoint{ + 0x7001: &FloatingPoint{F: Float64(2.0)}, + }, + ByteMapping: map[bool][]byte{ + false: []byte("that's not right!"), + true: []byte("aye, 'tis true!"), + }, + } + b, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + t.Logf("FYI b: %q", b) + m2 := new(MessageWithMap) + if err := Unmarshal(b, m2); err != nil { + t.Fatalf("Unmarshal: %v", err) + } + for _, pair := range [][2]interface{}{ + {m.NameMapping, m2.NameMapping}, + {m.MsgMapping, m2.MsgMapping}, + {m.ByteMapping, m2.ByteMapping}, + } { + if !reflect.DeepEqual(pair[0], pair[1]) { + t.Errorf("Map did not survive a round trip.\ninitial: %v\n final: %v", pair[0], pair[1]) + } + } +} + +func TestMapFieldWithNil(t *testing.T) { + m1 := &MessageWithMap{ + MsgMapping: map[int64]*FloatingPoint{ + 1: nil, + }, + } + b, err := Marshal(m1) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + m2 := new(MessageWithMap) + if err := Unmarshal(b, m2); err != nil { + t.Fatalf("Unmarshal: %v, got these bytes: %v", err, b) + } + if v, ok := m2.MsgMapping[1]; !ok { + t.Error("msg_mapping[1] not present") + } else if v != nil { + t.Errorf("msg_mapping[1] not nil: %v", v) + } +} + +func TestMapFieldWithNilBytes(t *testing.T) { + m1 := &MessageWithMap{ + ByteMapping: map[bool][]byte{ + false: []byte{}, + true: nil, + }, + } + n := Size(m1) + b, err := Marshal(m1) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + if n != len(b) { 
+ t.Errorf("Size(m1) = %d; want len(Marshal(m1)) = %d", n, len(b)) + } + m2 := new(MessageWithMap) + if err := Unmarshal(b, m2); err != nil { + t.Fatalf("Unmarshal: %v, got these bytes: %v", err, b) + } + if v, ok := m2.ByteMapping[false]; !ok { + t.Error("byte_mapping[false] not present") + } else if len(v) != 0 { + t.Errorf("byte_mapping[false] not empty: %#v", v) + } + if v, ok := m2.ByteMapping[true]; !ok { + t.Error("byte_mapping[true] not present") + } else if len(v) != 0 { + t.Errorf("byte_mapping[true] not empty: %#v", v) + } +} + +func TestDecodeMapFieldMissingKey(t *testing.T) { + b := []byte{ + 0x0A, 0x03, // message, tag 1 (name_mapping), of length 3 bytes + // no key + 0x12, 0x01, 0x6D, // string value of length 1 byte, value "m" + } + got := &MessageWithMap{} + err := Unmarshal(b, got) + if err != nil { + t.Fatalf("failed to marshal map with missing key: %v", err) + } + want := &MessageWithMap{NameMapping: map[int32]string{0: "m"}} + if !Equal(got, want) { + t.Errorf("Unmarshaled map with no key was not as expected. got: %v, want %v", got, want) + } +} + +func TestDecodeMapFieldMissingValue(t *testing.T) { + b := []byte{ + 0x0A, 0x02, // message, tag 1 (name_mapping), of length 2 bytes + 0x08, 0x01, // varint key, value 1 + // no value + } + got := &MessageWithMap{} + err := Unmarshal(b, got) + if err != nil { + t.Fatalf("failed to marshal map with missing value: %v", err) + } + want := &MessageWithMap{NameMapping: map[int32]string{1: ""}} + if !Equal(got, want) { + t.Errorf("Unmarshaled map with no value was not as expected. got: %v, want %v", got, want) + } +} + +func TestOneof(t *testing.T) { + m := &Communique{} + b, err := Marshal(m) + if err != nil { + t.Fatalf("Marshal of empty message with oneof: %v", err) + } + if len(b) != 0 { + t.Errorf("Marshal of empty message yielded too many bytes: %v", b) + } + + m = &Communique{ + Union: &Communique_Name{"Barry"}, + } + + // Round-trip. + b, err = Marshal(m) + if err != nil { + t.Fatalf("Marshal of message with oneof: %v", err) + } + if len(b) != 7 { // name tag/wire (1) + name len (1) + name (5) + t.Errorf("Incorrect marshal of message with oneof: %v", b) + } + m.Reset() + if err := Unmarshal(b, m); err != nil { + t.Fatalf("Unmarshal of message with oneof: %v", err) + } + if x, ok := m.Union.(*Communique_Name); !ok || x.Name != "Barry" { + t.Errorf("After round trip, Union = %+v", m.Union) + } + if name := m.GetName(); name != "Barry" { + t.Errorf("After round trip, GetName = %q, want %q", name, "Barry") + } + + // Let's try with a message in the oneof. + m.Union = &Communique_Msg{&Strings{StringField: String("deep deep string")}} + b, err = Marshal(m) + if err != nil { + t.Fatalf("Marshal of message with oneof set to message: %v", err) + } + if len(b) != 20 { // msg tag/wire (1) + msg len (1) + msg (1 + 1 + 16) + t.Errorf("Incorrect marshal of message with oneof set to message: %v", b) + } + m.Reset() + if err := Unmarshal(b, m); err != nil { + t.Fatalf("Unmarshal of message with oneof set to message: %v", err) + } + ss, ok := m.Union.(*Communique_Msg) + if !ok || ss.Msg.GetStringField() != "deep deep string" { + t.Errorf("After round trip with oneof set to message, Union = %+v", m.Union) + } +} + +func TestInefficientPackedBool(t *testing.T) { + // https://github.com/golang/protobuf/issues/76 + inp := []byte{ + 0x12, 0x02, // 0x12 = 2<<3|2; 2 bytes + // Usually a bool should take a single byte, + // but it is permitted to be any varint. 
+ 0xb9, 0x30, + } + if err := Unmarshal(inp, new(MoreRepeated)); err != nil { + t.Error(err) + } +} + +// Benchmarks + +func testMsg() *GoTest { + pb := initGoTest(true) + const N = 1000 // Internally the library starts much smaller. + pb.F_Int32Repeated = make([]int32, N) + pb.F_DoubleRepeated = make([]float64, N) + for i := 0; i < N; i++ { + pb.F_Int32Repeated[i] = int32(i) + pb.F_DoubleRepeated[i] = float64(i) + } + return pb +} + +func bytesMsg() *GoTest { + pb := initGoTest(true) + buf := make([]byte, 4000) + for i := range buf { + buf[i] = byte(i) + } + pb.F_BytesDefaulted = buf + return pb +} + +func benchmarkMarshal(b *testing.B, pb Message, marshal func(Message) ([]byte, error)) { + d, _ := marshal(pb) + b.SetBytes(int64(len(d))) + b.ResetTimer() + for i := 0; i < b.N; i++ { + marshal(pb) + } +} + +func benchmarkBufferMarshal(b *testing.B, pb Message) { + p := NewBuffer(nil) + benchmarkMarshal(b, pb, func(pb0 Message) ([]byte, error) { + p.Reset() + err := p.Marshal(pb0) + return p.Bytes(), err + }) +} + +func benchmarkSize(b *testing.B, pb Message) { + benchmarkMarshal(b, pb, func(pb0 Message) ([]byte, error) { + Size(pb) + return nil, nil + }) +} + +func newOf(pb Message) Message { + in := reflect.ValueOf(pb) + if in.IsNil() { + return pb + } + return reflect.New(in.Type().Elem()).Interface().(Message) +} + +func benchmarkUnmarshal(b *testing.B, pb Message, unmarshal func([]byte, Message) error) { + d, _ := Marshal(pb) + b.SetBytes(int64(len(d))) + pbd := newOf(pb) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + unmarshal(d, pbd) + } +} + +func benchmarkBufferUnmarshal(b *testing.B, pb Message) { + p := NewBuffer(nil) + benchmarkUnmarshal(b, pb, func(d []byte, pb0 Message) error { + p.SetBuf(d) + return p.Unmarshal(pb0) + }) +} + +// Benchmark{Marshal,BufferMarshal,Size,Unmarshal,BufferUnmarshal}{,Bytes} + +func BenchmarkMarshal(b *testing.B) { + benchmarkMarshal(b, testMsg(), Marshal) +} + +func BenchmarkBufferMarshal(b *testing.B) { + benchmarkBufferMarshal(b, testMsg()) +} + +func BenchmarkSize(b *testing.B) { + benchmarkSize(b, testMsg()) +} + +func BenchmarkUnmarshal(b *testing.B) { + benchmarkUnmarshal(b, testMsg(), Unmarshal) +} + +func BenchmarkBufferUnmarshal(b *testing.B) { + benchmarkBufferUnmarshal(b, testMsg()) +} + +func BenchmarkMarshalBytes(b *testing.B) { + benchmarkMarshal(b, bytesMsg(), Marshal) +} + +func BenchmarkBufferMarshalBytes(b *testing.B) { + benchmarkBufferMarshal(b, bytesMsg()) +} + +func BenchmarkSizeBytes(b *testing.B) { + benchmarkSize(b, bytesMsg()) +} + +func BenchmarkUnmarshalBytes(b *testing.B) { + benchmarkUnmarshal(b, bytesMsg(), Unmarshal) +} + +func BenchmarkBufferUnmarshalBytes(b *testing.B) { + benchmarkBufferUnmarshal(b, bytesMsg()) +} + +func BenchmarkUnmarshalUnrecognizedFields(b *testing.B) { + b.StopTimer() + pb := initGoTestField() + skip := &GoSkipTest{ + SkipInt32: Int32(32), + SkipFixed32: Uint32(3232), + SkipFixed64: Uint64(6464), + SkipString: String("skipper"), + Skipgroup: &GoSkipTest_SkipGroup{ + GroupInt32: Int32(75), + GroupString: String("wxyz"), + }, + } + + pbd := new(GoTestField) + p := NewBuffer(nil) + p.Marshal(pb) + p.Marshal(skip) + p2 := NewBuffer(nil) + + b.StartTimer() + for i := 0; i < b.N; i++ { + p2.SetBuf(p.Bytes()) + p2.Unmarshal(pbd) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/any_test.go b/vendor/github.com/golang/protobuf/proto/any_test.go new file mode 100644 index 000000000..1a3c22ed4 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/any_test.go @@ -0,0 +1,300 @@ +// Go 
support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "strings" + "testing" + + "github.com/golang/protobuf/proto" + + pb "github.com/golang/protobuf/proto/proto3_proto" + testpb "github.com/golang/protobuf/proto/testdata" + anypb "github.com/golang/protobuf/ptypes/any" +) + +var ( + expandedMarshaler = proto.TextMarshaler{ExpandAny: true} + expandedCompactMarshaler = proto.TextMarshaler{Compact: true, ExpandAny: true} +) + +// anyEqual reports whether two messages which may be google.protobuf.Any or may +// contain google.protobuf.Any fields are equal. We can't use proto.Equal for +// comparison, because semantically equivalent messages may be marshaled to +// binary in different tag order. Instead, trust that TextMarshaler with +// ExpandAny option works and compare the text marshaling results. +func anyEqual(got, want proto.Message) bool { + // if messages are proto.Equal, no need to marshal. 
+ if proto.Equal(got, want) { + return true + } + g := expandedMarshaler.Text(got) + w := expandedMarshaler.Text(want) + return g == w +} + +type golden struct { + m proto.Message + t, c string +} + +var goldenMessages = makeGolden() + +func makeGolden() []golden { + nested := &pb.Nested{Bunny: "Monty"} + nb, err := proto.Marshal(nested) + if err != nil { + panic(err) + } + m1 := &pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(nested), Value: nb}, + } + m2 := &pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: "http://[::1]/type.googleapis.com/" + proto.MessageName(nested), Value: nb}, + } + m3 := &pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: `type.googleapis.com/"/` + proto.MessageName(nested), Value: nb}, + } + m4 := &pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: "type.googleapis.com/a/path/" + proto.MessageName(nested), Value: nb}, + } + m5 := &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(nested), Value: nb} + + any1 := &testpb.MyMessage{Count: proto.Int32(47), Name: proto.String("David")} + proto.SetExtension(any1, testpb.E_Ext_More, &testpb.Ext{Data: proto.String("foo")}) + proto.SetExtension(any1, testpb.E_Ext_Text, proto.String("bar")) + any1b, err := proto.Marshal(any1) + if err != nil { + panic(err) + } + any2 := &testpb.MyMessage{Count: proto.Int32(42), Bikeshed: testpb.MyMessage_GREEN.Enum(), RepBytes: [][]byte{[]byte("roboto")}} + proto.SetExtension(any2, testpb.E_Ext_More, &testpb.Ext{Data: proto.String("baz")}) + any2b, err := proto.Marshal(any2) + if err != nil { + panic(err) + } + m6 := &pb.Message{ + Name: "David", + ResultCount: 47, + Anything: &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(any1), Value: any1b}, + ManyThings: []*anypb.Any{ + &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(any2), Value: any2b}, + &anypb.Any{TypeUrl: "type.googleapis.com/" + proto.MessageName(any1), Value: any1b}, + }, + } + + const ( + m1Golden = ` +name: "David" +result_count: 47 +anything: < + [type.googleapis.com/proto3_proto.Nested]: < + bunny: "Monty" + > +> +` + m2Golden = ` +name: "David" +result_count: 47 +anything: < + ["http://[::1]/type.googleapis.com/proto3_proto.Nested"]: < + bunny: "Monty" + > +> +` + m3Golden = ` +name: "David" +result_count: 47 +anything: < + ["type.googleapis.com/\"/proto3_proto.Nested"]: < + bunny: "Monty" + > +> +` + m4Golden = ` +name: "David" +result_count: 47 +anything: < + [type.googleapis.com/a/path/proto3_proto.Nested]: < + bunny: "Monty" + > +> +` + m5Golden = ` +[type.googleapis.com/proto3_proto.Nested]: < + bunny: "Monty" +> +` + m6Golden = ` +name: "David" +result_count: 47 +anything: < + [type.googleapis.com/testdata.MyMessage]: < + count: 47 + name: "David" + [testdata.Ext.more]: < + data: "foo" + > + [testdata.Ext.text]: "bar" + > +> +many_things: < + [type.googleapis.com/testdata.MyMessage]: < + count: 42 + bikeshed: GREEN + rep_bytes: "roboto" + [testdata.Ext.more]: < + data: "baz" + > + > +> +many_things: < + [type.googleapis.com/testdata.MyMessage]: < + count: 47 + name: "David" + [testdata.Ext.more]: < + data: "foo" + > + [testdata.Ext.text]: "bar" + > +> +` + ) + return []golden{ + {m1, strings.TrimSpace(m1Golden) + "\n", strings.TrimSpace(compact(m1Golden)) + " "}, + {m2, strings.TrimSpace(m2Golden) + "\n", strings.TrimSpace(compact(m2Golden)) + " "}, + {m3, strings.TrimSpace(m3Golden) + "\n", 
strings.TrimSpace(compact(m3Golden)) + " "}, + {m4, strings.TrimSpace(m4Golden) + "\n", strings.TrimSpace(compact(m4Golden)) + " "}, + {m5, strings.TrimSpace(m5Golden) + "\n", strings.TrimSpace(compact(m5Golden)) + " "}, + {m6, strings.TrimSpace(m6Golden) + "\n", strings.TrimSpace(compact(m6Golden)) + " "}, + } +} + +func TestMarshalGolden(t *testing.T) { + for _, tt := range goldenMessages { + if got, want := expandedMarshaler.Text(tt.m), tt.t; got != want { + t.Errorf("message %v: got:\n%s\nwant:\n%s", tt.m, got, want) + } + if got, want := expandedCompactMarshaler.Text(tt.m), tt.c; got != want { + t.Errorf("message %v: got:\n`%s`\nwant:\n`%s`", tt.m, got, want) + } + } +} + +func TestUnmarshalGolden(t *testing.T) { + for _, tt := range goldenMessages { + want := tt.m + got := proto.Clone(tt.m) + got.Reset() + if err := proto.UnmarshalText(tt.t, got); err != nil { + t.Errorf("failed to unmarshal\n%s\nerror: %v", tt.t, err) + } + if !anyEqual(got, want) { + t.Errorf("message:\n%s\ngot:\n%s\nwant:\n%s", tt.t, got, want) + } + got.Reset() + if err := proto.UnmarshalText(tt.c, got); err != nil { + t.Errorf("failed to unmarshal\n%s\nerror: %v", tt.c, err) + } + if !anyEqual(got, want) { + t.Errorf("message:\n%s\ngot:\n%s\nwant:\n%s", tt.c, got, want) + } + } +} + +func TestMarshalUnknownAny(t *testing.T) { + m := &pb.Message{ + Anything: &anypb.Any{ + TypeUrl: "foo", + Value: []byte("bar"), + }, + } + want := `anything: < + type_url: "foo" + value: "bar" +> +` + got := expandedMarshaler.Text(m) + if got != want { + t.Errorf("got\n`%s`\nwant\n`%s`", got, want) + } +} + +func TestAmbiguousAny(t *testing.T) { + pb := &anypb.Any{} + err := proto.UnmarshalText(` + type_url: "ttt/proto3_proto.Nested" + value: "\n\x05Monty" + `, pb) + t.Logf("result: %v (error: %v)", expandedMarshaler.Text(pb), err) + if err != nil { + t.Errorf("failed to parse ambiguous Any message: %v", err) + } +} + +func TestUnmarshalOverwriteAny(t *testing.T) { + pb := &anypb.Any{} + err := proto.UnmarshalText(` + [type.googleapis.com/a/path/proto3_proto.Nested]: < + bunny: "Monty" + > + [type.googleapis.com/a/path/proto3_proto.Nested]: < + bunny: "Rabbit of Caerbannog" + > + `, pb) + want := `line 7: Any message unpacked multiple times, or "type_url" already set` + if err.Error() != want { + t.Errorf("incorrect error.\nHave: %v\nWant: %v", err.Error(), want) + } +} + +func TestUnmarshalAnyMixAndMatch(t *testing.T) { + pb := &anypb.Any{} + err := proto.UnmarshalText(` + value: "\n\x05Monty" + [type.googleapis.com/a/path/proto3_proto.Nested]: < + bunny: "Rabbit of Caerbannog" + > + `, pb) + want := `line 5: Any message unpacked multiple times, or "value" already set` + if err.Error() != want { + t.Errorf("incorrect error.\nHave: %v\nWant: %v", err.Error(), want) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/clone.go b/vendor/github.com/golang/protobuf/proto/clone.go new file mode 100644 index 000000000..e392575b3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/clone.go @@ -0,0 +1,229 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2011 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Protocol buffer deep copy and merge. +// TODO: RawMessage. + +package proto + +import ( + "log" + "reflect" + "strings" +) + +// Clone returns a deep copy of a protocol buffer. +func Clone(pb Message) Message { + in := reflect.ValueOf(pb) + if in.IsNil() { + return pb + } + + out := reflect.New(in.Type().Elem()) + // out is empty so a merge is a deep copy. + mergeStruct(out.Elem(), in.Elem()) + return out.Interface().(Message) +} + +// Merge merges src into dst. +// Required and optional fields that are set in src will be set to that value in dst. +// Elements of repeated fields will be appended. +// Merge panics if src and dst are not the same type, or if dst is nil. +func Merge(dst, src Message) { + in := reflect.ValueOf(src) + out := reflect.ValueOf(dst) + if out.IsNil() { + panic("proto: nil destination") + } + if in.Type() != out.Type() { + // Explicit test prior to mergeStruct so that mistyped nils will fail + panic("proto: type mismatch") + } + if in.IsNil() { + // Merging nil into non-nil is a quiet no-op + return + } + mergeStruct(out.Elem(), in.Elem()) +} + +func mergeStruct(out, in reflect.Value) { + sprop := GetProperties(in.Type()) + for i := 0; i < in.NumField(); i++ { + f := in.Type().Field(i) + if strings.HasPrefix(f.Name, "XXX_") { + continue + } + mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i]) + } + + if emIn, ok := extendable(in.Addr().Interface()); ok { + emOut, _ := extendable(out.Addr().Interface()) + mIn, muIn := emIn.extensionsRead() + if mIn != nil { + mOut := emOut.extensionsWrite() + muIn.Lock() + mergeExtension(mOut, mIn) + muIn.Unlock() + } + } + + uf := in.FieldByName("XXX_unrecognized") + if !uf.IsValid() { + return + } + uin := uf.Bytes() + if len(uin) > 0 { + out.FieldByName("XXX_unrecognized").SetBytes(append([]byte(nil), uin...)) + } +} + +// mergeAny performs a merge between two values of the same type. +// viaPtr indicates whether the values were indirected through a pointer (implying proto2). +// prop is set if this is a struct field (it may be nil). 
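To make the Clone and Merge semantics documented above concrete, here is a minimal sketch using the testdata messages that clone_test.go below also imports; the main wrapper is purely illustrative:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "github.com/golang/protobuf/proto/testdata"
)

func main() {
	dst := &pb.MyMessage{Count: proto.Int32(7), Name: proto.String("Dave"), Pet: []string{"bunny"}}
	src := &pb.MyMessage{Count: proto.Int32(42), Pet: []string{"kitty"}}

	// Fields set in src overwrite dst; repeated fields are appended.
	proto.Merge(dst, src)
	fmt.Println(dst.GetCount(), dst.GetName(), dst.Pet) // 42 Dave [bunny kitty]

	// Clone is a deep copy: a merge into a freshly allocated message.
	cp := proto.Clone(dst).(*pb.MyMessage)
	fmt.Println(proto.Equal(cp, dst)) // true
}
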
+func mergeAny(out, in reflect.Value, viaPtr bool, prop *Properties) { + if in.Type() == protoMessageType { + if !in.IsNil() { + if out.IsNil() { + out.Set(reflect.ValueOf(Clone(in.Interface().(Message)))) + } else { + Merge(out.Interface().(Message), in.Interface().(Message)) + } + } + return + } + switch in.Kind() { + case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, + reflect.String, reflect.Uint32, reflect.Uint64: + if !viaPtr && isProto3Zero(in) { + return + } + out.Set(in) + case reflect.Interface: + // Probably a oneof field; copy non-nil values. + if in.IsNil() { + return + } + // Allocate destination if it is not set, or set to a different type. + // Otherwise we will merge as normal. + if out.IsNil() || out.Elem().Type() != in.Elem().Type() { + out.Set(reflect.New(in.Elem().Elem().Type())) // interface -> *T -> T -> new(T) + } + mergeAny(out.Elem(), in.Elem(), false, nil) + case reflect.Map: + if in.Len() == 0 { + return + } + if out.IsNil() { + out.Set(reflect.MakeMap(in.Type())) + } + // For maps with value types of *T or []byte we need to deep copy each value. + elemKind := in.Type().Elem().Kind() + for _, key := range in.MapKeys() { + var val reflect.Value + switch elemKind { + case reflect.Ptr: + val = reflect.New(in.Type().Elem().Elem()) + mergeAny(val, in.MapIndex(key), false, nil) + case reflect.Slice: + val = in.MapIndex(key) + val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) + default: + val = in.MapIndex(key) + } + out.SetMapIndex(key, val) + } + case reflect.Ptr: + if in.IsNil() { + return + } + if out.IsNil() { + out.Set(reflect.New(in.Elem().Type())) + } + mergeAny(out.Elem(), in.Elem(), true, nil) + case reflect.Slice: + if in.IsNil() { + return + } + if in.Type().Elem().Kind() == reflect.Uint8 { + // []byte is a scalar bytes field, not a repeated field. + + // Edge case: if this is in a proto3 message, a zero length + // bytes field is considered the zero value, and should not + // be merged. + if prop != nil && prop.proto3 && in.Len() == 0 { + return + } + + // Make a deep copy. + // Append to []byte{} instead of []byte(nil) so that we never end up + // with a nil result. 
+ out.SetBytes(append([]byte{}, in.Bytes()...)) + return + } + n := in.Len() + if out.IsNil() { + out.Set(reflect.MakeSlice(in.Type(), 0, n)) + } + switch in.Type().Elem().Kind() { + case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, + reflect.String, reflect.Uint32, reflect.Uint64: + out.Set(reflect.AppendSlice(out, in)) + default: + for i := 0; i < n; i++ { + x := reflect.Indirect(reflect.New(in.Type().Elem())) + mergeAny(x, in.Index(i), false, nil) + out.Set(reflect.Append(out, x)) + } + } + case reflect.Struct: + mergeStruct(out, in) + default: + // unknown type, so not a protocol buffer + log.Printf("proto: don't know how to copy %v", in) + } +} + +func mergeExtension(out, in map[int32]Extension) { + for extNum, eIn := range in { + eOut := Extension{desc: eIn.desc} + if eIn.value != nil { + v := reflect.New(reflect.TypeOf(eIn.value)).Elem() + mergeAny(v, reflect.ValueOf(eIn.value), false, nil) + eOut.value = v.Interface() + } + if eIn.enc != nil { + eOut.enc = make([]byte, len(eIn.enc)) + copy(eOut.enc, eIn.enc) + } + + out[extNum] = eOut + } +} diff --git a/vendor/github.com/golang/protobuf/proto/clone_test.go b/vendor/github.com/golang/protobuf/proto/clone_test.go new file mode 100644 index 000000000..f607ff49e --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/clone_test.go @@ -0,0 +1,300 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2011 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package proto_test + +import ( + "testing" + + "github.com/golang/protobuf/proto" + + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + pb "github.com/golang/protobuf/proto/testdata" +) + +var cloneTestMessage = &pb.MyMessage{ + Count: proto.Int32(42), + Name: proto.String("Dave"), + Pet: []string{"bunny", "kitty", "horsey"}, + Inner: &pb.InnerMessage{ + Host: proto.String("niles"), + Port: proto.Int32(9099), + Connected: proto.Bool(true), + }, + Others: []*pb.OtherMessage{ + { + Value: []byte("some bytes"), + }, + }, + Somegroup: &pb.MyMessage_SomeGroup{ + GroupField: proto.Int32(6), + }, + RepBytes: [][]byte{[]byte("sham"), []byte("wow")}, +} + +func init() { + ext := &pb.Ext{ + Data: proto.String("extension"), + } + if err := proto.SetExtension(cloneTestMessage, pb.E_Ext_More, ext); err != nil { + panic("SetExtension: " + err.Error()) + } +} + +func TestClone(t *testing.T) { + m := proto.Clone(cloneTestMessage).(*pb.MyMessage) + if !proto.Equal(m, cloneTestMessage) { + t.Errorf("Clone(%v) = %v", cloneTestMessage, m) + } + + // Verify it was a deep copy. + *m.Inner.Port++ + if proto.Equal(m, cloneTestMessage) { + t.Error("Mutating clone changed the original") + } + // Byte fields and repeated fields should be copied. + if &m.Pet[0] == &cloneTestMessage.Pet[0] { + t.Error("Pet: repeated field not copied") + } + if &m.Others[0] == &cloneTestMessage.Others[0] { + t.Error("Others: repeated field not copied") + } + if &m.Others[0].Value[0] == &cloneTestMessage.Others[0].Value[0] { + t.Error("Others[0].Value: bytes field not copied") + } + if &m.RepBytes[0] == &cloneTestMessage.RepBytes[0] { + t.Error("RepBytes: repeated field not copied") + } + if &m.RepBytes[0][0] == &cloneTestMessage.RepBytes[0][0] { + t.Error("RepBytes[0]: bytes field not copied") + } +} + +func TestCloneNil(t *testing.T) { + var m *pb.MyMessage + if c := proto.Clone(m); !proto.Equal(m, c) { + t.Errorf("Clone(%v) = %v", m, c) + } +} + +var mergeTests = []struct { + src, dst, want proto.Message +}{ + { + src: &pb.MyMessage{ + Count: proto.Int32(42), + }, + dst: &pb.MyMessage{ + Name: proto.String("Dave"), + }, + want: &pb.MyMessage{ + Count: proto.Int32(42), + Name: proto.String("Dave"), + }, + }, + { + src: &pb.MyMessage{ + Inner: &pb.InnerMessage{ + Host: proto.String("hey"), + Connected: proto.Bool(true), + }, + Pet: []string{"horsey"}, + Others: []*pb.OtherMessage{ + { + Value: []byte("some bytes"), + }, + }, + }, + dst: &pb.MyMessage{ + Inner: &pb.InnerMessage{ + Host: proto.String("niles"), + Port: proto.Int32(9099), + }, + Pet: []string{"bunny", "kitty"}, + Others: []*pb.OtherMessage{ + { + Key: proto.Int64(31415926535), + }, + { + // Explicitly test a src=nil field + Inner: nil, + }, + }, + }, + want: &pb.MyMessage{ + Inner: &pb.InnerMessage{ + Host: proto.String("hey"), + Connected: proto.Bool(true), + Port: proto.Int32(9099), + }, + Pet: []string{"bunny", "kitty", "horsey"}, + Others: []*pb.OtherMessage{ + { + Key: proto.Int64(31415926535), + }, + {}, + { + Value: []byte("some bytes"), + }, + }, + }, + }, + { + src: &pb.MyMessage{ + RepBytes: [][]byte{[]byte("wow")}, + }, + dst: &pb.MyMessage{ + Somegroup: &pb.MyMessage_SomeGroup{ + GroupField: proto.Int32(6), + }, + RepBytes: [][]byte{[]byte("sham")}, + }, + want: &pb.MyMessage{ + Somegroup: &pb.MyMessage_SomeGroup{ + GroupField: proto.Int32(6), + }, + RepBytes: [][]byte{[]byte("sham"), []byte("wow")}, + }, + }, + // Check that a scalar bytes field replaces rather than appends. 
+ { + src: &pb.OtherMessage{Value: []byte("foo")}, + dst: &pb.OtherMessage{Value: []byte("bar")}, + want: &pb.OtherMessage{Value: []byte("foo")}, + }, + { + src: &pb.MessageWithMap{ + NameMapping: map[int32]string{6: "Nigel"}, + MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4001: &pb.FloatingPoint{F: proto.Float64(2.0)}, + 0x4002: &pb.FloatingPoint{ + F: proto.Float64(2.0), + }, + }, + ByteMapping: map[bool][]byte{true: []byte("wowsa")}, + }, + dst: &pb.MessageWithMap{ + NameMapping: map[int32]string{ + 6: "Bruce", // should be overwritten + 7: "Andrew", + }, + MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4002: &pb.FloatingPoint{ + F: proto.Float64(3.0), + Exact: proto.Bool(true), + }, // the entire message should be overwritten + }, + }, + want: &pb.MessageWithMap{ + NameMapping: map[int32]string{ + 6: "Nigel", + 7: "Andrew", + }, + MsgMapping: map[int64]*pb.FloatingPoint{ + 0x4001: &pb.FloatingPoint{F: proto.Float64(2.0)}, + 0x4002: &pb.FloatingPoint{ + F: proto.Float64(2.0), + }, + }, + ByteMapping: map[bool][]byte{true: []byte("wowsa")}, + }, + }, + // proto3 shouldn't merge zero values, + // in the same way that proto2 shouldn't merge nils. + { + src: &proto3pb.Message{ + Name: "Aaron", + Data: []byte(""), // zero value, but not nil + }, + dst: &proto3pb.Message{ + HeightInCm: 176, + Data: []byte("texas!"), + }, + want: &proto3pb.Message{ + Name: "Aaron", + HeightInCm: 176, + Data: []byte("texas!"), + }, + }, + // Oneof fields should merge by assignment. + { + src: &pb.Communique{ + Union: &pb.Communique_Number{41}, + }, + dst: &pb.Communique{ + Union: &pb.Communique_Name{"Bobby Tables"}, + }, + want: &pb.Communique{ + Union: &pb.Communique_Number{41}, + }, + }, + // Oneof nil is the same as not set. + { + src: &pb.Communique{}, + dst: &pb.Communique{ + Union: &pb.Communique_Name{"Bobby Tables"}, + }, + want: &pb.Communique{ + Union: &pb.Communique_Name{"Bobby Tables"}, + }, + }, + { + src: &proto3pb.Message{ + Terrain: map[string]*proto3pb.Nested{ + "kay_a": &proto3pb.Nested{Cute: true}, // replace + "kay_b": &proto3pb.Nested{Bunny: "rabbit"}, // insert + }, + }, + dst: &proto3pb.Message{ + Terrain: map[string]*proto3pb.Nested{ + "kay_a": &proto3pb.Nested{Bunny: "lost"}, // replaced + "kay_c": &proto3pb.Nested{Bunny: "bunny"}, // keep + }, + }, + want: &proto3pb.Message{ + Terrain: map[string]*proto3pb.Nested{ + "kay_a": &proto3pb.Nested{Cute: true}, + "kay_b": &proto3pb.Nested{Bunny: "rabbit"}, + "kay_c": &proto3pb.Nested{Bunny: "bunny"}, + }, + }, + }, +} + +func TestMerge(t *testing.T) { + for _, m := range mergeTests { + got := proto.Clone(m.dst) + proto.Merge(got, m.src) + if !proto.Equal(got, m.want) { + t.Errorf("Merge(%v, %v)\n got %v\nwant %v\n", m.dst, m.src, got, m.want) + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/decode.go b/vendor/github.com/golang/protobuf/proto/decode.go new file mode 100644 index 000000000..aa207298f --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/decode.go @@ -0,0 +1,970 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +/* + * Routines for decoding protocol buffer data to construct in-memory representations. + */ + +import ( + "errors" + "fmt" + "io" + "os" + "reflect" +) + +// errOverflow is returned when an integer is too large to be represented. +var errOverflow = errors.New("proto: integer overflow") + +// ErrInternalBadWireType is returned by generated code when an incorrect +// wire type is encountered. It does not get returned to user code. +var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") + +// The fundamental decoders that interpret bytes on the wire. +// Those that take integer types all return uint64 and are +// therefore of type valueDecoder. + +// DecodeVarint reads a varint-encoded integer from the slice. +// It returns the integer and the number of bytes consumed, or +// zero if there is not enough. +// This is the format for the +// int32, int64, uint32, uint64, bool, and enum +// protocol buffer types. +func DecodeVarint(buf []byte) (x uint64, n int) { + for shift := uint(0); shift < 64; shift += 7 { + if n >= len(buf) { + return 0, 0 + } + b := uint64(buf[n]) + n++ + x |= (b & 0x7F) << shift + if (b & 0x80) == 0 { + return x, n + } + } + + // The number is too large to represent in a 64-bit value. + return 0, 0 +} + +func (p *Buffer) decodeVarintSlow() (x uint64, err error) { + i := p.index + l := len(p.buf) + + for shift := uint(0); shift < 64; shift += 7 { + if i >= l { + err = io.ErrUnexpectedEOF + return + } + b := p.buf[i] + i++ + x |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + p.index = i + return + } + } + + // The number is too large to represent in a 64-bit value. + err = errOverflow + return +} + +// DecodeVarint reads a varint-encoded integer from the Buffer. +// This is the format for the +// int32, int64, uint32, uint64, bool, and enum +// protocol buffer types. 
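For concreteness, the base-128 varint format that both DecodeVarint implementations above parse stores seven payload bits per byte, least-significant group first, with the high bit marking continuation; 300 therefore encodes as 0xAC 0x02. A short round trip using the package's exported helpers; the main wrapper is purely illustrative:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// 300 = 0b1_0010_1100: low seven bits 0x2C with the continuation bit set
	// (0xAC), then the remaining bits (0x02).
	b := proto.EncodeVarint(300)
	fmt.Printf("% x\n", b) // ac 02

	x, n := proto.DecodeVarint(b)
	fmt.Println(x, n) // 300 2
}
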
+func (p *Buffer) DecodeVarint() (x uint64, err error) { + i := p.index + buf := p.buf + + if i >= len(buf) { + return 0, io.ErrUnexpectedEOF + } else if buf[i] < 0x80 { + p.index++ + return uint64(buf[i]), nil + } else if len(buf)-i < 10 { + return p.decodeVarintSlow() + } + + var b uint64 + // we already checked the first byte + x = uint64(buf[i]) - 0x80 + i++ + + b = uint64(buf[i]) + i++ + x += b << 7 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 7 + + b = uint64(buf[i]) + i++ + x += b << 14 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 14 + + b = uint64(buf[i]) + i++ + x += b << 21 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 21 + + b = uint64(buf[i]) + i++ + x += b << 28 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 28 + + b = uint64(buf[i]) + i++ + x += b << 35 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 35 + + b = uint64(buf[i]) + i++ + x += b << 42 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 42 + + b = uint64(buf[i]) + i++ + x += b << 49 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 49 + + b = uint64(buf[i]) + i++ + x += b << 56 + if b&0x80 == 0 { + goto done + } + x -= 0x80 << 56 + + b = uint64(buf[i]) + i++ + x += b << 63 + if b&0x80 == 0 { + goto done + } + // x -= 0x80 << 63 // Always zero. + + return 0, errOverflow + +done: + p.index = i + return x, nil +} + +// DecodeFixed64 reads a 64-bit integer from the Buffer. +// This is the format for the +// fixed64, sfixed64, and double protocol buffer types. +func (p *Buffer) DecodeFixed64() (x uint64, err error) { + // x, err already 0 + i := p.index + 8 + if i < 0 || i > len(p.buf) { + err = io.ErrUnexpectedEOF + return + } + p.index = i + + x = uint64(p.buf[i-8]) + x |= uint64(p.buf[i-7]) << 8 + x |= uint64(p.buf[i-6]) << 16 + x |= uint64(p.buf[i-5]) << 24 + x |= uint64(p.buf[i-4]) << 32 + x |= uint64(p.buf[i-3]) << 40 + x |= uint64(p.buf[i-2]) << 48 + x |= uint64(p.buf[i-1]) << 56 + return +} + +// DecodeFixed32 reads a 32-bit integer from the Buffer. +// This is the format for the +// fixed32, sfixed32, and float protocol buffer types. +func (p *Buffer) DecodeFixed32() (x uint64, err error) { + // x, err already 0 + i := p.index + 4 + if i < 0 || i > len(p.buf) { + err = io.ErrUnexpectedEOF + return + } + p.index = i + + x = uint64(p.buf[i-4]) + x |= uint64(p.buf[i-3]) << 8 + x |= uint64(p.buf[i-2]) << 16 + x |= uint64(p.buf[i-1]) << 24 + return +} + +// DecodeZigzag64 reads a zigzag-encoded 64-bit integer +// from the Buffer. +// This is the format used for the sint64 protocol buffer type. +func (p *Buffer) DecodeZigzag64() (x uint64, err error) { + x, err = p.DecodeVarint() + if err != nil { + return + } + x = (x >> 1) ^ uint64((int64(x&1)<<63)>>63) + return +} + +// DecodeZigzag32 reads a zigzag-encoded 32-bit integer +// from the Buffer. +// This is the format used for the sint32 protocol buffer type. +func (p *Buffer) DecodeZigzag32() (x uint64, err error) { + x, err = p.DecodeVarint() + if err != nil { + return + } + x = uint64((uint32(x) >> 1) ^ uint32((int32(x&1)<<31)>>31)) + return +} + +// These are not ValueDecoders: they produce an array of bytes or a string. +// bytes, embedded messages + +// DecodeRawBytes reads a count-delimited byte buffer from the Buffer. +// This is the format used for the bytes protocol buffer +// type and for embedded messages. 
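The zigzag decoders above invert the mapping used for sint32/sint64 fields, which interleaves signed values so that small negative numbers remain small varints (0, -1, 1, -2, 2, ... map to 0, 1, 2, 3, 4, ...). A brief round-trip sketch with the Buffer methods; the main wrapper is purely illustrative:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	enc := proto.NewBuffer(nil)
	v := int64(-3)
	enc.EncodeZigzag64(uint64(v))    // -3 zigzags to 5, a one-byte varint
	fmt.Printf("% x\n", enc.Bytes()) // 05

	dec := proto.NewBuffer(enc.Bytes())
	u, err := dec.DecodeZigzag64()
	if err != nil {
		panic(err)
	}
	fmt.Println(int64(u)) // -3
}
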
+func (p *Buffer) DecodeRawBytes(alloc bool) (buf []byte, err error) { + n, err := p.DecodeVarint() + if err != nil { + return nil, err + } + + nb := int(n) + if nb < 0 { + return nil, fmt.Errorf("proto: bad byte length %d", nb) + } + end := p.index + nb + if end < p.index || end > len(p.buf) { + return nil, io.ErrUnexpectedEOF + } + + if !alloc { + // todo: check if can get more uses of alloc=false + buf = p.buf[p.index:end] + p.index += nb + return + } + + buf = make([]byte, nb) + copy(buf, p.buf[p.index:]) + p.index += nb + return +} + +// DecodeStringBytes reads an encoded string from the Buffer. +// This is the format used for the proto2 string type. +func (p *Buffer) DecodeStringBytes() (s string, err error) { + buf, err := p.DecodeRawBytes(false) + if err != nil { + return + } + return string(buf), nil +} + +// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. +// If the protocol buffer has extensions, and the field matches, add it as an extension. +// Otherwise, if the XXX_unrecognized field exists, append the skipped data there. +func (o *Buffer) skipAndSave(t reflect.Type, tag, wire int, base structPointer, unrecField field) error { + oi := o.index + + err := o.skip(t, tag, wire) + if err != nil { + return err + } + + if !unrecField.IsValid() { + return nil + } + + ptr := structPointer_Bytes(base, unrecField) + + // Add the skipped field to struct field + obuf := o.buf + + o.buf = *ptr + o.EncodeVarint(uint64(tag<<3 | wire)) + *ptr = append(o.buf, obuf[oi:o.index]...) + + o.buf = obuf + + return nil +} + +// Skip the next item in the buffer. Its wire type is decoded and presented as an argument. +func (o *Buffer) skip(t reflect.Type, tag, wire int) error { + + var u uint64 + var err error + + switch wire { + case WireVarint: + _, err = o.DecodeVarint() + case WireFixed64: + _, err = o.DecodeFixed64() + case WireBytes: + _, err = o.DecodeRawBytes(false) + case WireFixed32: + _, err = o.DecodeFixed32() + case WireStartGroup: + for { + u, err = o.DecodeVarint() + if err != nil { + break + } + fwire := int(u & 0x7) + if fwire == WireEndGroup { + break + } + ftag := int(u >> 3) + err = o.skip(t, ftag, fwire) + if err != nil { + break + } + } + default: + err = fmt.Errorf("proto: can't skip unknown wire type %d for %s", wire, t) + } + return err +} + +// Unmarshaler is the interface representing objects that can +// unmarshal themselves. The method should reset the receiver before +// decoding starts. The argument points to data that may be +// overwritten, so implementations should not keep references to the +// buffer. +type Unmarshaler interface { + Unmarshal([]byte) error +} + +// Unmarshal parses the protocol buffer representation in buf and places the +// decoded result in pb. If the struct underlying pb does not match +// the data in buf, the results can be unpredictable. +// +// Unmarshal resets pb before starting to unmarshal, so any +// existing data in pb is always removed. Use UnmarshalMerge +// to preserve and append to existing data. +func Unmarshal(buf []byte, pb Message) error { + pb.Reset() + return UnmarshalMerge(buf, pb) +} + +// UnmarshalMerge parses the protocol buffer representation in buf and +// writes the decoded result to pb. If the struct underlying pb does not match +// the data in buf, the results can be unpredictable. +// +// UnmarshalMerge merges into existing data in pb. +// Most code should use Unmarshal instead. +func UnmarshalMerge(buf []byte, pb Message) error { + // If the object can unmarshal itself, let it. 
+ if u, ok := pb.(Unmarshaler); ok { + return u.Unmarshal(buf) + } + return NewBuffer(buf).Unmarshal(pb) +} + +// DecodeMessage reads a count-delimited message from the Buffer. +func (p *Buffer) DecodeMessage(pb Message) error { + enc, err := p.DecodeRawBytes(false) + if err != nil { + return err + } + return NewBuffer(enc).Unmarshal(pb) +} + +// DecodeGroup reads a tag-delimited group from the Buffer. +func (p *Buffer) DecodeGroup(pb Message) error { + typ, base, err := getbase(pb) + if err != nil { + return err + } + return p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), true, base) +} + +// Unmarshal parses the protocol buffer representation in the +// Buffer and places the decoded result in pb. If the struct +// underlying pb does not match the data in the buffer, the results can be +// unpredictable. +// +// Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal. +func (p *Buffer) Unmarshal(pb Message) error { + // If the object can unmarshal itself, let it. + if u, ok := pb.(Unmarshaler); ok { + err := u.Unmarshal(p.buf[p.index:]) + p.index = len(p.buf) + return err + } + + typ, base, err := getbase(pb) + if err != nil { + return err + } + + err = p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), false, base) + + if collectStats { + stats.Decode++ + } + + return err +} + +// unmarshalType does the work of unmarshaling a structure. +func (o *Buffer) unmarshalType(st reflect.Type, prop *StructProperties, is_group bool, base structPointer) error { + var state errorState + required, reqFields := prop.reqCount, uint64(0) + + var err error + for err == nil && o.index < len(o.buf) { + oi := o.index + var u uint64 + u, err = o.DecodeVarint() + if err != nil { + break + } + wire := int(u & 0x7) + if wire == WireEndGroup { + if is_group { + if required > 0 { + // Not enough information to determine the exact field. + // (See below.) + return &RequiredNotSetError{"{Unknown}"} + } + return nil // input is satisfied + } + return fmt.Errorf("proto: %s: wiretype end group for non-group", st) + } + tag := int(u >> 3) + if tag <= 0 { + return fmt.Errorf("proto: %s: illegal tag %d (wire type %d)", st, tag, wire) + } + fieldnum, ok := prop.decoderTags.get(tag) + if !ok { + // Maybe it's an extension? + if prop.extendable { + if e, _ := extendable(structPointer_Interface(base, st)); isExtensionField(e, int32(tag)) { + if err = o.skip(st, tag, wire); err == nil { + extmap := e.extensionsWrite() + ext := extmap[int32(tag)] // may be missing + ext.enc = append(ext.enc, o.buf[oi:o.index]...) + extmap[int32(tag)] = ext + } + continue + } + } + // Maybe it's a oneof? + if prop.oneofUnmarshaler != nil { + m := structPointer_Interface(base, st).(Message) + // First return value indicates whether tag is a oneof field. + ok, err = prop.oneofUnmarshaler(m, tag, wire, o) + if err == ErrInternalBadWireType { + // Map the error to something more descriptive. + // Do the formatting here to save generated code space. 
+ err = fmt.Errorf("bad wiretype for oneof field in %T", m) + } + if ok { + continue + } + } + err = o.skipAndSave(st, tag, wire, base, prop.unrecField) + continue + } + p := prop.Prop[fieldnum] + + if p.dec == nil { + fmt.Fprintf(os.Stderr, "proto: no protobuf decoder for %s.%s\n", st, st.Field(fieldnum).Name) + continue + } + dec := p.dec + if wire != WireStartGroup && wire != p.WireType { + if wire == WireBytes && p.packedDec != nil { + // a packable field + dec = p.packedDec + } else { + err = fmt.Errorf("proto: bad wiretype for field %s.%s: got wiretype %d, want %d", st, st.Field(fieldnum).Name, wire, p.WireType) + continue + } + } + decErr := dec(o, p, base) + if decErr != nil && !state.shouldContinue(decErr, p) { + err = decErr + } + if err == nil && p.Required { + // Successfully decoded a required field. + if tag <= 64 { + // use bitmap for fields 1-64 to catch field reuse. + var mask uint64 = 1 << uint64(tag-1) + if reqFields&mask == 0 { + // new required field + reqFields |= mask + required-- + } + } else { + // This is imprecise. It can be fooled by a required field + // with a tag > 64 that is encoded twice; that's very rare. + // A fully correct implementation would require allocating + // a data structure, which we would like to avoid. + required-- + } + } + } + if err == nil { + if is_group { + return io.ErrUnexpectedEOF + } + if state.err != nil { + return state.err + } + if required > 0 { + // Not enough information to determine the exact field. If we use extra + // CPU, we could determine the field only if the missing required field + // has a tag <= 64 and we check reqFields. + return &RequiredNotSetError{"{Unknown}"} + } + } + return err +} + +// Individual type decoders +// For each, +// u is the decoded value, +// v is a pointer to the field (pointer) in the struct + +// Sizes of the pools to allocate inside the Buffer. +// The goal is modest amortization and allocation +// on at least 16-byte boundaries. +const ( + boolPoolSize = 16 + uint32PoolSize = 8 + uint64PoolSize = 4 +) + +// Decode a bool. +func (o *Buffer) dec_bool(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + if len(o.bools) == 0 { + o.bools = make([]bool, boolPoolSize) + } + o.bools[0] = u != 0 + *structPointer_Bool(base, p.field) = &o.bools[0] + o.bools = o.bools[1:] + return nil +} + +func (o *Buffer) dec_proto3_bool(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + *structPointer_BoolVal(base, p.field) = u != 0 + return nil +} + +// Decode an int32. +func (o *Buffer) dec_int32(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + word32_Set(structPointer_Word32(base, p.field), o, uint32(u)) + return nil +} + +func (o *Buffer) dec_proto3_int32(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + word32Val_Set(structPointer_Word32Val(base, p.field), uint32(u)) + return nil +} + +// Decode an int64. +func (o *Buffer) dec_int64(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + word64_Set(structPointer_Word64(base, p.field), o, u) + return nil +} + +func (o *Buffer) dec_proto3_int64(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + word64Val_Set(structPointer_Word64Val(base, p.field), o, u) + return nil +} + +// Decode a string. 
+func (o *Buffer) dec_string(p *Properties, base structPointer) error { + s, err := o.DecodeStringBytes() + if err != nil { + return err + } + *structPointer_String(base, p.field) = &s + return nil +} + +func (o *Buffer) dec_proto3_string(p *Properties, base structPointer) error { + s, err := o.DecodeStringBytes() + if err != nil { + return err + } + *structPointer_StringVal(base, p.field) = s + return nil +} + +// Decode a slice of bytes ([]byte). +func (o *Buffer) dec_slice_byte(p *Properties, base structPointer) error { + b, err := o.DecodeRawBytes(true) + if err != nil { + return err + } + *structPointer_Bytes(base, p.field) = b + return nil +} + +// Decode a slice of bools ([]bool). +func (o *Buffer) dec_slice_bool(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + v := structPointer_BoolSlice(base, p.field) + *v = append(*v, u != 0) + return nil +} + +// Decode a slice of bools ([]bool) in packed format. +func (o *Buffer) dec_slice_packed_bool(p *Properties, base structPointer) error { + v := structPointer_BoolSlice(base, p.field) + + nn, err := o.DecodeVarint() + if err != nil { + return err + } + nb := int(nn) // number of bytes of encoded bools + fin := o.index + nb + if fin < o.index { + return errOverflow + } + + y := *v + for o.index < fin { + u, err := p.valDec(o) + if err != nil { + return err + } + y = append(y, u != 0) + } + + *v = y + return nil +} + +// Decode a slice of int32s ([]int32). +func (o *Buffer) dec_slice_int32(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + structPointer_Word32Slice(base, p.field).Append(uint32(u)) + return nil +} + +// Decode a slice of int32s ([]int32) in packed format. +func (o *Buffer) dec_slice_packed_int32(p *Properties, base structPointer) error { + v := structPointer_Word32Slice(base, p.field) + + nn, err := o.DecodeVarint() + if err != nil { + return err + } + nb := int(nn) // number of bytes of encoded int32s + + fin := o.index + nb + if fin < o.index { + return errOverflow + } + for o.index < fin { + u, err := p.valDec(o) + if err != nil { + return err + } + v.Append(uint32(u)) + } + return nil +} + +// Decode a slice of int64s ([]int64). +func (o *Buffer) dec_slice_int64(p *Properties, base structPointer) error { + u, err := p.valDec(o) + if err != nil { + return err + } + + structPointer_Word64Slice(base, p.field).Append(u) + return nil +} + +// Decode a slice of int64s ([]int64) in packed format. +func (o *Buffer) dec_slice_packed_int64(p *Properties, base structPointer) error { + v := structPointer_Word64Slice(base, p.field) + + nn, err := o.DecodeVarint() + if err != nil { + return err + } + nb := int(nn) // number of bytes of encoded int64s + + fin := o.index + nb + if fin < o.index { + return errOverflow + } + for o.index < fin { + u, err := p.valDec(o) + if err != nil { + return err + } + v.Append(u) + } + return nil +} + +// Decode a slice of strings ([]string). +func (o *Buffer) dec_slice_string(p *Properties, base structPointer) error { + s, err := o.DecodeStringBytes() + if err != nil { + return err + } + v := structPointer_StringSlice(base, p.field) + *v = append(*v, s) + return nil +} + +// Decode a slice of slice of bytes ([][]byte). +func (o *Buffer) dec_slice_slice_byte(p *Properties, base structPointer) error { + b, err := o.DecodeRawBytes(true) + if err != nil { + return err + } + v := structPointer_BytesSlice(base, p.field) + *v = append(*v, b) + return nil +} + +// Decode a map field. 
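The packed decoders above (dec_slice_packed_bool through dec_slice_packed_int64) read one length-delimited payload of back-to-back varints. Below is a hand-assembled payload decoded with the package's exported helpers; the byte values are worked out by hand for this sketch and are not taken from the vendored tests:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// Packed payload for the values 1, 2 and 300:
	// length varint 0x04, then varints 0x01, 0x02, 0xac 0x02.
	raw, err := proto.NewBuffer([]byte{0x04, 0x01, 0x02, 0xac, 0x02}).DecodeRawBytes(true)
	if err != nil {
		panic(err)
	}
	for len(raw) > 0 {
		v, n := proto.DecodeVarint(raw)
		if n == 0 {
			break // malformed varint
		}
		fmt.Println(v) // 1, 2, 300
		raw = raw[n:]
	}
}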
+func (o *Buffer) dec_new_map(p *Properties, base structPointer) error { + raw, err := o.DecodeRawBytes(false) + if err != nil { + return err + } + oi := o.index // index at the end of this map entry + o.index -= len(raw) // move buffer back to start of map entry + + mptr := structPointer_NewAt(base, p.field, p.mtype) // *map[K]V + if mptr.Elem().IsNil() { + mptr.Elem().Set(reflect.MakeMap(mptr.Type().Elem())) + } + v := mptr.Elem() // map[K]V + + // Prepare addressable doubly-indirect placeholders for the key and value types. + // See enc_new_map for why. + keyptr := reflect.New(reflect.PtrTo(p.mtype.Key())).Elem() // addressable *K + keybase := toStructPointer(keyptr.Addr()) // **K + + var valbase structPointer + var valptr reflect.Value + switch p.mtype.Elem().Kind() { + case reflect.Slice: + // []byte + var dummy []byte + valptr = reflect.ValueOf(&dummy) // *[]byte + valbase = toStructPointer(valptr) // *[]byte + case reflect.Ptr: + // message; valptr is **Msg; need to allocate the intermediate pointer + valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V + valptr.Set(reflect.New(valptr.Type().Elem())) + valbase = toStructPointer(valptr) + default: + // everything else + valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V + valbase = toStructPointer(valptr.Addr()) // **V + } + + // Decode. + // This parses a restricted wire format, namely the encoding of a message + // with two fields. See enc_new_map for the format. + for o.index < oi { + // tagcode for key and value properties are always a single byte + // because they have tags 1 and 2. + tagcode := o.buf[o.index] + o.index++ + switch tagcode { + case p.mkeyprop.tagcode[0]: + if err := p.mkeyprop.dec(o, p.mkeyprop, keybase); err != nil { + return err + } + case p.mvalprop.tagcode[0]: + if err := p.mvalprop.dec(o, p.mvalprop, valbase); err != nil { + return err + } + default: + // TODO: Should we silently skip this instead? + return fmt.Errorf("proto: bad map data tag %d", raw[0]) + } + } + keyelem, valelem := keyptr.Elem(), valptr.Elem() + if !keyelem.IsValid() { + keyelem = reflect.Zero(p.mtype.Key()) + } + if !valelem.IsValid() { + valelem = reflect.Zero(p.mtype.Elem()) + } + + v.SetMapIndex(keyelem, valelem) + return nil +} + +// Decode a group. +func (o *Buffer) dec_struct_group(p *Properties, base structPointer) error { + bas := structPointer_GetStructPointer(base, p.field) + if structPointer_IsNil(bas) { + // allocate new nested message + bas = toStructPointer(reflect.New(p.stype)) + structPointer_SetStructPointer(base, p.field, bas) + } + return o.unmarshalType(p.stype, p.sprop, true, bas) +} + +// Decode an embedded message. +func (o *Buffer) dec_struct_message(p *Properties, base structPointer) (err error) { + raw, e := o.DecodeRawBytes(false) + if e != nil { + return e + } + + bas := structPointer_GetStructPointer(base, p.field) + if structPointer_IsNil(bas) { + // allocate new nested message + bas = toStructPointer(reflect.New(p.stype)) + structPointer_SetStructPointer(base, p.field, bas) + } + + // If the object can unmarshal itself, let it. + if p.isUnmarshaler { + iv := structPointer_Interface(bas, p.stype) + return iv.(Unmarshaler).Unmarshal(raw) + } + + obuf := o.buf + oi := o.index + o.buf = raw + o.index = 0 + + err = o.unmarshalType(p.stype, p.sprop, false, bas) + o.buf = obuf + o.index = oi + + return err +} + +// Decode a slice of embedded messages. 
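dec_struct_message above slices out the count-delimited bytes of an embedded message and recursively unmarshals them; the exported EncodeMessage/DecodeMessage pair uses the same framing. A round-trip sketch, again assuming the Name field of the proto3_proto test message used elsewhere in this diff:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	tpb "github.com/golang/protobuf/proto/proto3_proto"
)

func main() {
	buf := proto.NewBuffer(nil)
	if err := buf.EncodeMessage(&tpb.Message{Name: "nested"}); err != nil {
		panic(err)
	}
	// The buffer now holds a varint length followed by the message body.
	out := new(tpb.Message)
	if err := proto.NewBuffer(buf.Bytes()).DecodeMessage(out); err != nil {
		panic(err)
	}
	fmt.Println(out.Name) // nested
}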
+func (o *Buffer) dec_slice_struct_message(p *Properties, base structPointer) error { + return o.dec_slice_struct(p, false, base) +} + +// Decode a slice of embedded groups. +func (o *Buffer) dec_slice_struct_group(p *Properties, base structPointer) error { + return o.dec_slice_struct(p, true, base) +} + +// Decode a slice of structs ([]*struct). +func (o *Buffer) dec_slice_struct(p *Properties, is_group bool, base structPointer) error { + v := reflect.New(p.stype) + bas := toStructPointer(v) + structPointer_StructPointerSlice(base, p.field).Append(bas) + + if is_group { + err := o.unmarshalType(p.stype, p.sprop, is_group, bas) + return err + } + + raw, err := o.DecodeRawBytes(false) + if err != nil { + return err + } + + // If the object can unmarshal itself, let it. + if p.isUnmarshaler { + iv := v.Interface() + return iv.(Unmarshaler).Unmarshal(raw) + } + + obuf := o.buf + oi := o.index + o.buf = raw + o.index = 0 + + err = o.unmarshalType(p.stype, p.sprop, is_group, bas) + + o.buf = obuf + o.index = oi + + return err +} diff --git a/vendor/github.com/golang/protobuf/proto/decode_test.go b/vendor/github.com/golang/protobuf/proto/decode_test.go new file mode 100644 index 000000000..2c4c31d12 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/decode_test.go @@ -0,0 +1,258 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build go1.7 + +package proto_test + +import ( + "fmt" + "testing" + + "github.com/golang/protobuf/proto" + tpb "github.com/golang/protobuf/proto/proto3_proto" +) + +var ( + bytesBlackhole []byte + msgBlackhole = new(tpb.Message) +) + +// BenchmarkVarint32ArraySmall shows the performance on an array of small int32 fields (1 and +// 2 bytes long). +func BenchmarkVarint32ArraySmall(b *testing.B) { + for i := uint(1); i <= 10; i++ { + dist := genInt32Dist([7]int{0, 3, 1}, 1<2GB. 
+ ErrTooLarge = errors.New("proto: message encodes to over 2 GB") +) + +// The fundamental encoders that put bytes on the wire. +// Those that take integer types all accept uint64 and are +// therefore of type valueEncoder. + +const maxVarintBytes = 10 // maximum length of a varint + +// maxMarshalSize is the largest allowed size of an encoded protobuf, +// since C++ and Java use signed int32s for the size. +const maxMarshalSize = 1<<31 - 1 + +// EncodeVarint returns the varint encoding of x. +// This is the format for the +// int32, int64, uint32, uint64, bool, and enum +// protocol buffer types. +// Not used by the package itself, but helpful to clients +// wishing to use the same encoding. +func EncodeVarint(x uint64) []byte { + var buf [maxVarintBytes]byte + var n int + for n = 0; x > 127; n++ { + buf[n] = 0x80 | uint8(x&0x7F) + x >>= 7 + } + buf[n] = uint8(x) + n++ + return buf[0:n] +} + +// EncodeVarint writes a varint-encoded integer to the Buffer. +// This is the format for the +// int32, int64, uint32, uint64, bool, and enum +// protocol buffer types. +func (p *Buffer) EncodeVarint(x uint64) error { + for x >= 1<<7 { + p.buf = append(p.buf, uint8(x&0x7f|0x80)) + x >>= 7 + } + p.buf = append(p.buf, uint8(x)) + return nil +} + +// SizeVarint returns the varint encoding size of an integer. +func SizeVarint(x uint64) int { + return sizeVarint(x) +} + +func sizeVarint(x uint64) (n int) { + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n +} + +// EncodeFixed64 writes a 64-bit integer to the Buffer. +// This is the format for the +// fixed64, sfixed64, and double protocol buffer types. +func (p *Buffer) EncodeFixed64(x uint64) error { + p.buf = append(p.buf, + uint8(x), + uint8(x>>8), + uint8(x>>16), + uint8(x>>24), + uint8(x>>32), + uint8(x>>40), + uint8(x>>48), + uint8(x>>56)) + return nil +} + +func sizeFixed64(x uint64) int { + return 8 +} + +// EncodeFixed32 writes a 32-bit integer to the Buffer. +// This is the format for the +// fixed32, sfixed32, and float protocol buffer types. +func (p *Buffer) EncodeFixed32(x uint64) error { + p.buf = append(p.buf, + uint8(x), + uint8(x>>8), + uint8(x>>16), + uint8(x>>24)) + return nil +} + +func sizeFixed32(x uint64) int { + return 4 +} + +// EncodeZigzag64 writes a zigzag-encoded 64-bit integer +// to the Buffer. +// This is the format used for the sint64 protocol buffer type. +func (p *Buffer) EncodeZigzag64(x uint64) error { + // use signed number to get arithmetic right shift. + return p.EncodeVarint((x << 1) ^ uint64((int64(x) >> 63))) +} + +func sizeZigzag64(x uint64) int { + return sizeVarint((x << 1) ^ uint64((int64(x) >> 63))) +} + +// EncodeZigzag32 writes a zigzag-encoded 32-bit integer +// to the Buffer. +// This is the format used for the sint32 protocol buffer type. +func (p *Buffer) EncodeZigzag32(x uint64) error { + // use signed number to get arithmetic right shift. + return p.EncodeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) +} + +func sizeZigzag32(x uint64) int { + return sizeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) +} + +// EncodeRawBytes writes a count-delimited byte buffer to the Buffer. +// This is the format used for the bytes protocol buffer +// type and for embedded messages. +func (p *Buffer) EncodeRawBytes(b []byte) error { + p.EncodeVarint(uint64(len(b))) + p.buf = append(p.buf, b...) + return nil +} + +func sizeRawBytes(b []byte) int { + return sizeVarint(uint64(len(b))) + + len(b) +} + +// EncodeStringBytes writes an encoded string to the Buffer. 
+// This is the format used for the proto2 string type. +func (p *Buffer) EncodeStringBytes(s string) error { + p.EncodeVarint(uint64(len(s))) + p.buf = append(p.buf, s...) + return nil +} + +func sizeStringBytes(s string) int { + return sizeVarint(uint64(len(s))) + + len(s) +} + +// Marshaler is the interface representing objects that can marshal themselves. +type Marshaler interface { + Marshal() ([]byte, error) +} + +// Marshal takes the protocol buffer +// and encodes it into the wire format, returning the data. +func Marshal(pb Message) ([]byte, error) { + // Can the object marshal itself? + if m, ok := pb.(Marshaler); ok { + return m.Marshal() + } + p := NewBuffer(nil) + err := p.Marshal(pb) + if p.buf == nil && err == nil { + // Return a non-nil slice on success. + return []byte{}, nil + } + return p.buf, err +} + +// EncodeMessage writes the protocol buffer to the Buffer, +// prefixed by a varint-encoded length. +func (p *Buffer) EncodeMessage(pb Message) error { + t, base, err := getbase(pb) + if structPointer_IsNil(base) { + return ErrNil + } + if err == nil { + var state errorState + err = p.enc_len_struct(GetProperties(t.Elem()), base, &state) + } + return err +} + +// Marshal takes the protocol buffer +// and encodes it into the wire format, writing the result to the +// Buffer. +func (p *Buffer) Marshal(pb Message) error { + // Can the object marshal itself? + if m, ok := pb.(Marshaler); ok { + data, err := m.Marshal() + p.buf = append(p.buf, data...) + return err + } + + t, base, err := getbase(pb) + if structPointer_IsNil(base) { + return ErrNil + } + if err == nil { + err = p.enc_struct(GetProperties(t.Elem()), base) + } + + if collectStats { + (stats).Encode++ // Parens are to work around a goimports bug. + } + + if len(p.buf) > maxMarshalSize { + return ErrTooLarge + } + return err +} + +// Size returns the encoded size of a protocol buffer. +func Size(pb Message) (n int) { + // Can the object marshal itself? If so, Size is slow. + // TODO: add Size to Marshaler, or add a Sizer interface. + if m, ok := pb.(Marshaler); ok { + b, _ := m.Marshal() + return len(b) + } + + t, base, err := getbase(pb) + if structPointer_IsNil(base) { + return 0 + } + if err == nil { + n = size_struct(GetProperties(t.Elem()), base) + } + + if collectStats { + (stats).Size++ // Parens are to work around a goimports bug. + } + + return +} + +// Individual type encoders. + +// Encode a bool. +func (o *Buffer) enc_bool(p *Properties, base structPointer) error { + v := *structPointer_Bool(base, p.field) + if v == nil { + return ErrNil + } + x := 0 + if *v { + x = 1 + } + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, uint64(x)) + return nil +} + +func (o *Buffer) enc_proto3_bool(p *Properties, base structPointer) error { + v := *structPointer_BoolVal(base, p.field) + if !v { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, 1) + return nil +} + +func size_bool(p *Properties, base structPointer) int { + v := *structPointer_Bool(base, p.field) + if v == nil { + return 0 + } + return len(p.tagcode) + 1 // each bool takes exactly one byte +} + +func size_proto3_bool(p *Properties, base structPointer) int { + v := *structPointer_BoolVal(base, p.field) + if !v && !p.oneof { + return 0 + } + return len(p.tagcode) + 1 // each bool takes exactly one byte +} + +// Encode an int32. 
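The primitives above on concrete values: EncodeVarint emits 7 bits per byte with a continuation bit, and the zigzag helpers map signed values to small unsigned ones before varint-encoding them. Illustrative arithmetic only, using only the exported helpers:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	fmt.Printf("% x\n", proto.EncodeVarint(300)) // ac 02
	fmt.Println(proto.SizeVarint(300))           // 2

	// The sint32/sint64 zigzag mapping: 0->0, -1->1, 1->2, -2->3, ...
	zigzag := func(x int64) uint64 { return uint64(x<<1) ^ uint64(x>>63) }
	fmt.Println(zigzag(-1), zigzag(1), zigzag(-2)) // 1 2 3
}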
+func (o *Buffer) enc_int32(p *Properties, base structPointer) error { + v := structPointer_Word32(base, p.field) + if word32_IsNil(v) { + return ErrNil + } + x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, uint64(x)) + return nil +} + +func (o *Buffer) enc_proto3_int32(p *Properties, base structPointer) error { + v := structPointer_Word32Val(base, p.field) + x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range + if x == 0 { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, uint64(x)) + return nil +} + +func size_int32(p *Properties, base structPointer) (n int) { + v := structPointer_Word32(base, p.field) + if word32_IsNil(v) { + return 0 + } + x := int32(word32_Get(v)) // permit sign extension to use full 64-bit range + n += len(p.tagcode) + n += p.valSize(uint64(x)) + return +} + +func size_proto3_int32(p *Properties, base structPointer) (n int) { + v := structPointer_Word32Val(base, p.field) + x := int32(word32Val_Get(v)) // permit sign extension to use full 64-bit range + if x == 0 && !p.oneof { + return 0 + } + n += len(p.tagcode) + n += p.valSize(uint64(x)) + return +} + +// Encode a uint32. +// Exactly the same as int32, except for no sign extension. +func (o *Buffer) enc_uint32(p *Properties, base structPointer) error { + v := structPointer_Word32(base, p.field) + if word32_IsNil(v) { + return ErrNil + } + x := word32_Get(v) + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, uint64(x)) + return nil +} + +func (o *Buffer) enc_proto3_uint32(p *Properties, base structPointer) error { + v := structPointer_Word32Val(base, p.field) + x := word32Val_Get(v) + if x == 0 { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, uint64(x)) + return nil +} + +func size_uint32(p *Properties, base structPointer) (n int) { + v := structPointer_Word32(base, p.field) + if word32_IsNil(v) { + return 0 + } + x := word32_Get(v) + n += len(p.tagcode) + n += p.valSize(uint64(x)) + return +} + +func size_proto3_uint32(p *Properties, base structPointer) (n int) { + v := structPointer_Word32Val(base, p.field) + x := word32Val_Get(v) + if x == 0 && !p.oneof { + return 0 + } + n += len(p.tagcode) + n += p.valSize(uint64(x)) + return +} + +// Encode an int64. +func (o *Buffer) enc_int64(p *Properties, base structPointer) error { + v := structPointer_Word64(base, p.field) + if word64_IsNil(v) { + return ErrNil + } + x := word64_Get(v) + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, x) + return nil +} + +func (o *Buffer) enc_proto3_int64(p *Properties, base structPointer) error { + v := structPointer_Word64Val(base, p.field) + x := word64Val_Get(v) + if x == 0 { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, x) + return nil +} + +func size_int64(p *Properties, base structPointer) (n int) { + v := structPointer_Word64(base, p.field) + if word64_IsNil(v) { + return 0 + } + x := word64_Get(v) + n += len(p.tagcode) + n += p.valSize(x) + return +} + +func size_proto3_int64(p *Properties, base structPointer) (n int) { + v := structPointer_Word64Val(base, p.field) + x := word64Val_Get(v) + if x == 0 && !p.oneof { + return 0 + } + n += len(p.tagcode) + n += p.valSize(x) + return +} + +// Encode a string. +func (o *Buffer) enc_string(p *Properties, base structPointer) error { + v := *structPointer_String(base, p.field) + if v == nil { + return ErrNil + } + x := *v + o.buf = append(o.buf, p.tagcode...) 
+ o.EncodeStringBytes(x) + return nil +} + +func (o *Buffer) enc_proto3_string(p *Properties, base structPointer) error { + v := *structPointer_StringVal(base, p.field) + if v == "" { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + o.EncodeStringBytes(v) + return nil +} + +func size_string(p *Properties, base structPointer) (n int) { + v := *structPointer_String(base, p.field) + if v == nil { + return 0 + } + x := *v + n += len(p.tagcode) + n += sizeStringBytes(x) + return +} + +func size_proto3_string(p *Properties, base structPointer) (n int) { + v := *structPointer_StringVal(base, p.field) + if v == "" && !p.oneof { + return 0 + } + n += len(p.tagcode) + n += sizeStringBytes(v) + return +} + +// All protocol buffer fields are nillable, but be careful. +func isNil(v reflect.Value) bool { + switch v.Kind() { + case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return v.IsNil() + } + return false +} + +// Encode a message struct. +func (o *Buffer) enc_struct_message(p *Properties, base structPointer) error { + var state errorState + structp := structPointer_GetStructPointer(base, p.field) + if structPointer_IsNil(structp) { + return ErrNil + } + + // Can the object marshal itself? + if p.isMarshaler { + m := structPointer_Interface(structp, p.stype).(Marshaler) + data, err := m.Marshal() + if err != nil && !state.shouldContinue(err, nil) { + return err + } + o.buf = append(o.buf, p.tagcode...) + o.EncodeRawBytes(data) + return state.err + } + + o.buf = append(o.buf, p.tagcode...) + return o.enc_len_struct(p.sprop, structp, &state) +} + +func size_struct_message(p *Properties, base structPointer) int { + structp := structPointer_GetStructPointer(base, p.field) + if structPointer_IsNil(structp) { + return 0 + } + + // Can the object marshal itself? + if p.isMarshaler { + m := structPointer_Interface(structp, p.stype).(Marshaler) + data, _ := m.Marshal() + n0 := len(p.tagcode) + n1 := sizeRawBytes(data) + return n0 + n1 + } + + n0 := len(p.tagcode) + n1 := size_struct(p.sprop, structp) + n2 := sizeVarint(uint64(n1)) // size of encoded length + return n0 + n1 + n2 +} + +// Encode a group struct. +func (o *Buffer) enc_struct_group(p *Properties, base structPointer) error { + var state errorState + b := structPointer_GetStructPointer(base, p.field) + if structPointer_IsNil(b) { + return ErrNil + } + + o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) + err := o.enc_struct(p.sprop, b) + if err != nil && !state.shouldContinue(err, nil) { + return err + } + o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) + return state.err +} + +func size_struct_group(p *Properties, base structPointer) (n int) { + b := structPointer_GetStructPointer(base, p.field) + if structPointer_IsNil(b) { + return 0 + } + + n += sizeVarint(uint64((p.Tag << 3) | WireStartGroup)) + n += size_struct(p.sprop, b) + n += sizeVarint(uint64((p.Tag << 3) | WireEndGroup)) + return +} + +// Encode a slice of bools ([]bool). +func (o *Buffer) enc_slice_bool(p *Properties, base structPointer) error { + s := *structPointer_BoolSlice(base, p.field) + l := len(s) + if l == 0 { + return ErrNil + } + for _, x := range s { + o.buf = append(o.buf, p.tagcode...) 
+ v := uint64(0) + if x { + v = 1 + } + p.valEnc(o, v) + } + return nil +} + +func size_slice_bool(p *Properties, base structPointer) int { + s := *structPointer_BoolSlice(base, p.field) + l := len(s) + if l == 0 { + return 0 + } + return l * (len(p.tagcode) + 1) // each bool takes exactly one byte +} + +// Encode a slice of bools ([]bool) in packed format. +func (o *Buffer) enc_slice_packed_bool(p *Properties, base structPointer) error { + s := *structPointer_BoolSlice(base, p.field) + l := len(s) + if l == 0 { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + o.EncodeVarint(uint64(l)) // each bool takes exactly one byte + for _, x := range s { + v := uint64(0) + if x { + v = 1 + } + p.valEnc(o, v) + } + return nil +} + +func size_slice_packed_bool(p *Properties, base structPointer) (n int) { + s := *structPointer_BoolSlice(base, p.field) + l := len(s) + if l == 0 { + return 0 + } + n += len(p.tagcode) + n += sizeVarint(uint64(l)) + n += l // each bool takes exactly one byte + return +} + +// Encode a slice of bytes ([]byte). +func (o *Buffer) enc_slice_byte(p *Properties, base structPointer) error { + s := *structPointer_Bytes(base, p.field) + if s == nil { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + o.EncodeRawBytes(s) + return nil +} + +func (o *Buffer) enc_proto3_slice_byte(p *Properties, base structPointer) error { + s := *structPointer_Bytes(base, p.field) + if len(s) == 0 { + return ErrNil + } + o.buf = append(o.buf, p.tagcode...) + o.EncodeRawBytes(s) + return nil +} + +func size_slice_byte(p *Properties, base structPointer) (n int) { + s := *structPointer_Bytes(base, p.field) + if s == nil && !p.oneof { + return 0 + } + n += len(p.tagcode) + n += sizeRawBytes(s) + return +} + +func size_proto3_slice_byte(p *Properties, base structPointer) (n int) { + s := *structPointer_Bytes(base, p.field) + if len(s) == 0 && !p.oneof { + return 0 + } + n += len(p.tagcode) + n += sizeRawBytes(s) + return +} + +// Encode a slice of int32s ([]int32). +func (o *Buffer) enc_slice_int32(p *Properties, base structPointer) error { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return ErrNil + } + for i := 0; i < l; i++ { + o.buf = append(o.buf, p.tagcode...) + x := int32(s.Index(i)) // permit sign extension to use full 64-bit range + p.valEnc(o, uint64(x)) + } + return nil +} + +func size_slice_int32(p *Properties, base structPointer) (n int) { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return 0 + } + for i := 0; i < l; i++ { + n += len(p.tagcode) + x := int32(s.Index(i)) // permit sign extension to use full 64-bit range + n += p.valSize(uint64(x)) + } + return +} + +// Encode a slice of int32s ([]int32) in packed format. +func (o *Buffer) enc_slice_packed_int32(p *Properties, base structPointer) error { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return ErrNil + } + // TODO: Reuse a Buffer. + buf := NewBuffer(nil) + for i := 0; i < l; i++ { + x := int32(s.Index(i)) // permit sign extension to use full 64-bit range + p.valEnc(buf, uint64(x)) + } + + o.buf = append(o.buf, p.tagcode...) + o.EncodeVarint(uint64(len(buf.buf))) + o.buf = append(o.buf, buf.buf...) 
+ return nil +} + +func size_slice_packed_int32(p *Properties, base structPointer) (n int) { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return 0 + } + var bufSize int + for i := 0; i < l; i++ { + x := int32(s.Index(i)) // permit sign extension to use full 64-bit range + bufSize += p.valSize(uint64(x)) + } + + n += len(p.tagcode) + n += sizeVarint(uint64(bufSize)) + n += bufSize + return +} + +// Encode a slice of uint32s ([]uint32). +// Exactly the same as int32, except for no sign extension. +func (o *Buffer) enc_slice_uint32(p *Properties, base structPointer) error { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return ErrNil + } + for i := 0; i < l; i++ { + o.buf = append(o.buf, p.tagcode...) + x := s.Index(i) + p.valEnc(o, uint64(x)) + } + return nil +} + +func size_slice_uint32(p *Properties, base structPointer) (n int) { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return 0 + } + for i := 0; i < l; i++ { + n += len(p.tagcode) + x := s.Index(i) + n += p.valSize(uint64(x)) + } + return +} + +// Encode a slice of uint32s ([]uint32) in packed format. +// Exactly the same as int32, except for no sign extension. +func (o *Buffer) enc_slice_packed_uint32(p *Properties, base structPointer) error { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return ErrNil + } + // TODO: Reuse a Buffer. + buf := NewBuffer(nil) + for i := 0; i < l; i++ { + p.valEnc(buf, uint64(s.Index(i))) + } + + o.buf = append(o.buf, p.tagcode...) + o.EncodeVarint(uint64(len(buf.buf))) + o.buf = append(o.buf, buf.buf...) + return nil +} + +func size_slice_packed_uint32(p *Properties, base structPointer) (n int) { + s := structPointer_Word32Slice(base, p.field) + l := s.Len() + if l == 0 { + return 0 + } + var bufSize int + for i := 0; i < l; i++ { + bufSize += p.valSize(uint64(s.Index(i))) + } + + n += len(p.tagcode) + n += sizeVarint(uint64(bufSize)) + n += bufSize + return +} + +// Encode a slice of int64s ([]int64). +func (o *Buffer) enc_slice_int64(p *Properties, base structPointer) error { + s := structPointer_Word64Slice(base, p.field) + l := s.Len() + if l == 0 { + return ErrNil + } + for i := 0; i < l; i++ { + o.buf = append(o.buf, p.tagcode...) + p.valEnc(o, s.Index(i)) + } + return nil +} + +func size_slice_int64(p *Properties, base structPointer) (n int) { + s := structPointer_Word64Slice(base, p.field) + l := s.Len() + if l == 0 { + return 0 + } + for i := 0; i < l; i++ { + n += len(p.tagcode) + n += p.valSize(s.Index(i)) + } + return +} + +// Encode a slice of int64s ([]int64) in packed format. +func (o *Buffer) enc_slice_packed_int64(p *Properties, base structPointer) error { + s := structPointer_Word64Slice(base, p.field) + l := s.Len() + if l == 0 { + return ErrNil + } + // TODO: Reuse a Buffer. + buf := NewBuffer(nil) + for i := 0; i < l; i++ { + p.valEnc(buf, s.Index(i)) + } + + o.buf = append(o.buf, p.tagcode...) + o.EncodeVarint(uint64(len(buf.buf))) + o.buf = append(o.buf, buf.buf...) + return nil +} + +func size_slice_packed_int64(p *Properties, base structPointer) (n int) { + s := structPointer_Word64Slice(base, p.field) + l := s.Len() + if l == 0 { + return 0 + } + var bufSize int + for i := 0; i < l; i++ { + bufSize += p.valSize(s.Index(i)) + } + + n += len(p.tagcode) + n += sizeVarint(uint64(bufSize)) + n += bufSize + return +} + +// Encode a slice of slice of bytes ([][]byte). 
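From the sizing side, size_slice_packed_int32 above charges one field key, one length varint and the concatenated payload. Worked out for a field holding the values 1, 2 and 300; the field number 4 is an arbitrary choice for this sketch:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	const fieldNum = 4
	values := []uint64{1, 2, 300}

	payload := 0
	for _, v := range values {
		payload += proto.SizeVarint(v) // 1 + 1 + 2
	}
	key := proto.SizeVarint(uint64(fieldNum<<3 | proto.WireBytes))
	total := key + proto.SizeVarint(uint64(payload)) + payload
	fmt.Println(payload, total) // 4 6
}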
+func (o *Buffer) enc_slice_slice_byte(p *Properties, base structPointer) error { + ss := *structPointer_BytesSlice(base, p.field) + l := len(ss) + if l == 0 { + return ErrNil + } + for i := 0; i < l; i++ { + o.buf = append(o.buf, p.tagcode...) + o.EncodeRawBytes(ss[i]) + } + return nil +} + +func size_slice_slice_byte(p *Properties, base structPointer) (n int) { + ss := *structPointer_BytesSlice(base, p.field) + l := len(ss) + if l == 0 { + return 0 + } + n += l * len(p.tagcode) + for i := 0; i < l; i++ { + n += sizeRawBytes(ss[i]) + } + return +} + +// Encode a slice of strings ([]string). +func (o *Buffer) enc_slice_string(p *Properties, base structPointer) error { + ss := *structPointer_StringSlice(base, p.field) + l := len(ss) + for i := 0; i < l; i++ { + o.buf = append(o.buf, p.tagcode...) + o.EncodeStringBytes(ss[i]) + } + return nil +} + +func size_slice_string(p *Properties, base structPointer) (n int) { + ss := *structPointer_StringSlice(base, p.field) + l := len(ss) + n += l * len(p.tagcode) + for i := 0; i < l; i++ { + n += sizeStringBytes(ss[i]) + } + return +} + +// Encode a slice of message structs ([]*struct). +func (o *Buffer) enc_slice_struct_message(p *Properties, base structPointer) error { + var state errorState + s := structPointer_StructPointerSlice(base, p.field) + l := s.Len() + + for i := 0; i < l; i++ { + structp := s.Index(i) + if structPointer_IsNil(structp) { + return errRepeatedHasNil + } + + // Can the object marshal itself? + if p.isMarshaler { + m := structPointer_Interface(structp, p.stype).(Marshaler) + data, err := m.Marshal() + if err != nil && !state.shouldContinue(err, nil) { + return err + } + o.buf = append(o.buf, p.tagcode...) + o.EncodeRawBytes(data) + continue + } + + o.buf = append(o.buf, p.tagcode...) + err := o.enc_len_struct(p.sprop, structp, &state) + if err != nil && !state.shouldContinue(err, nil) { + if err == ErrNil { + return errRepeatedHasNil + } + return err + } + } + return state.err +} + +func size_slice_struct_message(p *Properties, base structPointer) (n int) { + s := structPointer_StructPointerSlice(base, p.field) + l := s.Len() + n += l * len(p.tagcode) + for i := 0; i < l; i++ { + structp := s.Index(i) + if structPointer_IsNil(structp) { + return // return the size up to this point + } + + // Can the object marshal itself? + if p.isMarshaler { + m := structPointer_Interface(structp, p.stype).(Marshaler) + data, _ := m.Marshal() + n += sizeRawBytes(data) + continue + } + + n0 := size_struct(p.sprop, structp) + n1 := sizeVarint(uint64(n0)) // size of encoded length + n += n0 + n1 + } + return +} + +// Encode a slice of group structs ([]*struct). 
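Group fields (enc_struct_group above and the repeated variant that follows) are framed by a start-group and an end-group key rather than a length prefix. The two keys for an example field number 4, computed with the same (Tag<<3)|wire packing the encoder uses; illustrative only:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	const fieldNum = 4
	start := uint64(fieldNum<<3 | proto.WireStartGroup)
	end := uint64(fieldNum<<3 | proto.WireEndGroup)
	// Both fit in a single varint byte for small field numbers.
	fmt.Printf("start %#x  end %#x\n", start, end)                                // start 0x23  end 0x24
	fmt.Printf("% x | % x\n", proto.EncodeVarint(start), proto.EncodeVarint(end)) // 23 | 24
}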
+func (o *Buffer) enc_slice_struct_group(p *Properties, base structPointer) error { + var state errorState + s := structPointer_StructPointerSlice(base, p.field) + l := s.Len() + + for i := 0; i < l; i++ { + b := s.Index(i) + if structPointer_IsNil(b) { + return errRepeatedHasNil + } + + o.EncodeVarint(uint64((p.Tag << 3) | WireStartGroup)) + + err := o.enc_struct(p.sprop, b) + + if err != nil && !state.shouldContinue(err, nil) { + if err == ErrNil { + return errRepeatedHasNil + } + return err + } + + o.EncodeVarint(uint64((p.Tag << 3) | WireEndGroup)) + } + return state.err +} + +func size_slice_struct_group(p *Properties, base structPointer) (n int) { + s := structPointer_StructPointerSlice(base, p.field) + l := s.Len() + + n += l * sizeVarint(uint64((p.Tag<<3)|WireStartGroup)) + n += l * sizeVarint(uint64((p.Tag<<3)|WireEndGroup)) + for i := 0; i < l; i++ { + b := s.Index(i) + if structPointer_IsNil(b) { + return // return size up to this point + } + + n += size_struct(p.sprop, b) + } + return +} + +// Encode an extension map. +func (o *Buffer) enc_map(p *Properties, base structPointer) error { + exts := structPointer_ExtMap(base, p.field) + if err := encodeExtensionsMap(*exts); err != nil { + return err + } + + return o.enc_map_body(*exts) +} + +func (o *Buffer) enc_exts(p *Properties, base structPointer) error { + exts := structPointer_Extensions(base, p.field) + + v, mu := exts.extensionsRead() + if v == nil { + return nil + } + + mu.Lock() + defer mu.Unlock() + if err := encodeExtensionsMap(v); err != nil { + return err + } + + return o.enc_map_body(v) +} + +func (o *Buffer) enc_map_body(v map[int32]Extension) error { + // Fast-path for common cases: zero or one extensions. + if len(v) <= 1 { + for _, e := range v { + o.buf = append(o.buf, e.enc...) + } + return nil + } + + // Sort keys to provide a deterministic encoding. + keys := make([]int, 0, len(v)) + for k := range v { + keys = append(keys, int(k)) + } + sort.Ints(keys) + + for _, k := range keys { + o.buf = append(o.buf, v[int32(k)].enc...) + } + return nil +} + +func size_map(p *Properties, base structPointer) int { + v := structPointer_ExtMap(base, p.field) + return extensionsMapSize(*v) +} + +func size_exts(p *Properties, base structPointer) int { + v := structPointer_Extensions(base, p.field) + return extensionsSize(v) +} + +// Encode a map field. +func (o *Buffer) enc_new_map(p *Properties, base structPointer) error { + var state errorState // XXX: or do we need to plumb this through? + + /* + A map defined as + map map_field = N; + is encoded in the same way as + message MapFieldEntry { + key_type key = 1; + value_type value = 2; + } + repeated MapFieldEntry map_field = N; + */ + + v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V + if v.Len() == 0 { + return nil + } + + keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) + + enc := func() error { + if err := p.mkeyprop.enc(o, p.mkeyprop, keybase); err != nil { + return err + } + if err := p.mvalprop.enc(o, p.mvalprop, valbase); err != nil && err != ErrNil { + return err + } + return nil + } + + // Don't sort map keys. It is not required by the spec, and C++ doesn't do it. + for _, key := range v.MapKeys() { + val := v.MapIndex(key) + + keycopy.Set(key) + valcopy.Set(val) + + o.buf = append(o.buf, p.tagcode...) 
+ if err := o.enc_len_thing(enc, &state); err != nil { + return err + } + } + return nil +} + +func size_new_map(p *Properties, base structPointer) int { + v := structPointer_NewAt(base, p.field, p.mtype).Elem() // map[K]V + + keycopy, valcopy, keybase, valbase := mapEncodeScratch(p.mtype) + + n := 0 + for _, key := range v.MapKeys() { + val := v.MapIndex(key) + keycopy.Set(key) + valcopy.Set(val) + + // Tag codes for key and val are the responsibility of the sub-sizer. + keysize := p.mkeyprop.size(p.mkeyprop, keybase) + valsize := p.mvalprop.size(p.mvalprop, valbase) + entry := keysize + valsize + // Add on tag code and length of map entry itself. + n += len(p.tagcode) + sizeVarint(uint64(entry)) + entry + } + return n +} + +// mapEncodeScratch returns a new reflect.Value matching the map's value type, +// and a structPointer suitable for passing to an encoder or sizer. +func mapEncodeScratch(mapType reflect.Type) (keycopy, valcopy reflect.Value, keybase, valbase structPointer) { + // Prepare addressable doubly-indirect placeholders for the key and value types. + // This is needed because the element-type encoders expect **T, but the map iteration produces T. + + keycopy = reflect.New(mapType.Key()).Elem() // addressable K + keyptr := reflect.New(reflect.PtrTo(keycopy.Type())).Elem() // addressable *K + keyptr.Set(keycopy.Addr()) // + keybase = toStructPointer(keyptr.Addr()) // **K + + // Value types are more varied and require special handling. + switch mapType.Elem().Kind() { + case reflect.Slice: + // []byte + var dummy []byte + valcopy = reflect.ValueOf(&dummy).Elem() // addressable []byte + valbase = toStructPointer(valcopy.Addr()) + case reflect.Ptr: + // message; the generated field type is map[K]*Msg (so V is *Msg), + // so we only need one level of indirection. + valcopy = reflect.New(mapType.Elem()).Elem() // addressable V + valbase = toStructPointer(valcopy.Addr()) + default: + // everything else + valcopy = reflect.New(mapType.Elem()).Elem() // addressable V + valptr := reflect.New(reflect.PtrTo(valcopy.Type())).Elem() // addressable *V + valptr.Set(valcopy.Addr()) // + valbase = toStructPointer(valptr.Addr()) // **V + } + return +} + +// Encode a struct. +func (o *Buffer) enc_struct(prop *StructProperties, base structPointer) error { + var state errorState + // Encode fields in tag order so that decoders may use optimizations + // that depend on the ordering. + // https://developers.google.com/protocol-buffers/docs/encoding#order + for _, i := range prop.order { + p := prop.Prop[i] + if p.enc != nil { + err := p.enc(o, p, base) + if err != nil { + if err == ErrNil { + if p.Required && state.err == nil { + state.err = &RequiredNotSetError{p.Name} + } + } else if err == errRepeatedHasNil { + // Give more context to nil values in repeated fields. + return errors.New("repeated field " + p.OrigName + " has nil element") + } else if !state.shouldContinue(err, p) { + return err + } + } + if len(o.buf) > maxMarshalSize { + return ErrTooLarge + } + } + } + + // Do oneof fields. + if prop.oneofMarshaler != nil { + m := structPointer_Interface(base, prop.stype).(Message) + if err := prop.oneofMarshaler(m, o); err == ErrNil { + return errOneofHasNil + } else if err != nil { + return err + } + } + + // Add unrecognized fields at the end. + if prop.unrecField.IsValid() { + v := *structPointer_Bytes(base, prop.unrecField) + if len(o.buf)+len(v) > maxMarshalSize { + return ErrTooLarge + } + if len(v) > 0 { + o.buf = append(o.buf, v...) 
+ } + } + + return state.err +} + +func size_struct(prop *StructProperties, base structPointer) (n int) { + for _, i := range prop.order { + p := prop.Prop[i] + if p.size != nil { + n += p.size(p, base) + } + } + + // Add unrecognized fields at the end. + if prop.unrecField.IsValid() { + v := *structPointer_Bytes(base, prop.unrecField) + n += len(v) + } + + // Factor in any oneof fields. + if prop.oneofSizer != nil { + m := structPointer_Interface(base, prop.stype).(Message) + n += prop.oneofSizer(m) + } + + return +} + +var zeroes [20]byte // longer than any conceivable sizeVarint + +// Encode a struct, preceded by its encoded length (as a varint). +func (o *Buffer) enc_len_struct(prop *StructProperties, base structPointer, state *errorState) error { + return o.enc_len_thing(func() error { return o.enc_struct(prop, base) }, state) +} + +// Encode something, preceded by its encoded length (as a varint). +func (o *Buffer) enc_len_thing(enc func() error, state *errorState) error { + iLen := len(o.buf) + o.buf = append(o.buf, 0, 0, 0, 0) // reserve four bytes for length + iMsg := len(o.buf) + err := enc() + if err != nil && !state.shouldContinue(err, nil) { + return err + } + lMsg := len(o.buf) - iMsg + lLen := sizeVarint(uint64(lMsg)) + switch x := lLen - (iMsg - iLen); { + case x > 0: // actual length is x bytes larger than the space we reserved + // Move msg x bytes right. + o.buf = append(o.buf, zeroes[:x]...) + copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) + case x < 0: // actual length is x bytes smaller than the space we reserved + // Move msg x bytes left. + copy(o.buf[iMsg+x:], o.buf[iMsg:iMsg+lMsg]) + o.buf = o.buf[:len(o.buf)+x] // x is negative + } + // Encode the length in the reserved space. + o.buf = o.buf[:iLen] + o.EncodeVarint(uint64(lMsg)) + o.buf = o.buf[:len(o.buf)+lMsg] + return state.err +} + +// errorState maintains the first error that occurs and updates that error +// with additional context. +type errorState struct { + err error +} + +// shouldContinue reports whether encoding should continue upon encountering the +// given error. If the error is RequiredNotSetError, shouldContinue returns true +// and, if this is the first appearance of that error, remembers it for future +// reporting. +// +// If prop is not nil, it may update any error with additional context about the +// field with the error. +func (s *errorState) shouldContinue(err error, prop *Properties) bool { + // Ignore unset required fields. + reqNotSet, ok := err.(*RequiredNotSetError) + if !ok { + return false + } + if s.err == nil { + if prop != nil { + err = &RequiredNotSetError{prop.Name + "." + reqNotSet.field} + } + s.err = err + } + return true +} diff --git a/vendor/github.com/golang/protobuf/proto/encode_test.go b/vendor/github.com/golang/protobuf/proto/encode_test.go new file mode 100644 index 000000000..a7209475f --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/encode_test.go @@ -0,0 +1,85 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build go1.7 + +package proto_test + +import ( + "strconv" + "testing" + + "github.com/golang/protobuf/proto" + tpb "github.com/golang/protobuf/proto/proto3_proto" + "github.com/golang/protobuf/ptypes" +) + +var ( + blackhole []byte +) + +// BenchmarkAny creates increasingly large arbitrary Any messages. The type is always the +// same. +func BenchmarkAny(b *testing.B) { + data := make([]byte, 1<<20) + quantum := 1 << 10 + for i := uint(0); i <= 10; i++ { + b.Run(strconv.Itoa(quantum<= len(o.buf) { + break + } + } + return value.Interface(), nil +} + +// GetExtensions returns a slice of the extensions present in pb that are also listed in es. +// The returned slice has the same length as es; missing extensions will appear as nil elements. +func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) { + epb, ok := extendable(pb) + if !ok { + return nil, errors.New("proto: not an extendable proto") + } + extensions = make([]interface{}, len(es)) + for i, e := range es { + extensions[i], err = GetExtension(epb, e) + if err == ErrMissingExtension { + err = nil + } + if err != nil { + return + } + } + return +} + +// ExtensionDescs returns a new slice containing pb's extension descriptors, in undefined order. +// For non-registered extensions, ExtensionDescs returns an incomplete descriptor containing +// just the Field field, which defines the extension's field number. +func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { + epb, ok := extendable(pb) + if !ok { + return nil, fmt.Errorf("proto: %T is not an extendable proto.Message", pb) + } + registeredExtensions := RegisteredExtensions(pb) + + emap, mu := epb.extensionsRead() + if emap == nil { + return nil, nil + } + mu.Lock() + defer mu.Unlock() + extensions := make([]*ExtensionDesc, 0, len(emap)) + for extid, e := range emap { + desc := e.desc + if desc == nil { + desc = registeredExtensions[extid] + if desc == nil { + desc = &ExtensionDesc{Field: extid} + } + } + + extensions = append(extensions, desc) + } + return extensions, nil +} + +// SetExtension sets the specified extension of pb to the specified value. 
+func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error { + epb, ok := extendable(pb) + if !ok { + return errors.New("proto: not an extendable proto") + } + if err := checkExtensionTypes(epb, extension); err != nil { + return err + } + typ := reflect.TypeOf(extension.ExtensionType) + if typ != reflect.TypeOf(value) { + return errors.New("proto: bad extension value type") + } + // nil extension values need to be caught early, because the + // encoder can't distinguish an ErrNil due to a nil extension + // from an ErrNil due to a missing field. Extensions are + // always optional, so the encoder would just swallow the error + // and drop all the extensions from the encoded message. + if reflect.ValueOf(value).IsNil() { + return fmt.Errorf("proto: SetExtension called with nil value of type %T", value) + } + + extmap := epb.extensionsWrite() + extmap[extension.Field] = Extension{desc: extension, value: value} + return nil +} + +// ClearAllExtensions clears all extensions from pb. +func ClearAllExtensions(pb Message) { + epb, ok := extendable(pb) + if !ok { + return + } + m := epb.extensionsWrite() + for k := range m { + delete(m, k) + } +} + +// A global registry of extensions. +// The generated code will register the generated descriptors by calling RegisterExtension. + +var extensionMaps = make(map[reflect.Type]map[int32]*ExtensionDesc) + +// RegisterExtension is called from the generated code. +func RegisterExtension(desc *ExtensionDesc) { + st := reflect.TypeOf(desc.ExtendedType).Elem() + m := extensionMaps[st] + if m == nil { + m = make(map[int32]*ExtensionDesc) + extensionMaps[st] = m + } + if _, ok := m[desc.Field]; ok { + panic("proto: duplicate extension registered: " + st.String() + " " + strconv.Itoa(int(desc.Field))) + } + m[desc.Field] = desc +} + +// RegisteredExtensions returns a map of the registered extensions of a +// protocol buffer struct, indexed by the extension number. +// The argument pb should be a nil pointer to the struct type. +func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc { + return extensionMaps[reflect.TypeOf(pb).Elem()] +} diff --git a/vendor/github.com/golang/protobuf/proto/extensions_test.go b/vendor/github.com/golang/protobuf/proto/extensions_test.go new file mode 100644 index 000000000..b6d9114c5 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/extensions_test.go @@ -0,0 +1,536 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2014 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "bytes" + "fmt" + "reflect" + "sort" + "testing" + + "github.com/golang/protobuf/proto" + pb "github.com/golang/protobuf/proto/testdata" + "golang.org/x/sync/errgroup" +) + +func TestGetExtensionsWithMissingExtensions(t *testing.T) { + msg := &pb.MyMessage{} + ext1 := &pb.Ext{} + if err := proto.SetExtension(msg, pb.E_Ext_More, ext1); err != nil { + t.Fatalf("Could not set ext1: %s", err) + } + exts, err := proto.GetExtensions(msg, []*proto.ExtensionDesc{ + pb.E_Ext_More, + pb.E_Ext_Text, + }) + if err != nil { + t.Fatalf("GetExtensions() failed: %s", err) + } + if exts[0] != ext1 { + t.Errorf("ext1 not in returned extensions: %T %v", exts[0], exts[0]) + } + if exts[1] != nil { + t.Errorf("ext2 in returned extensions: %T %v", exts[1], exts[1]) + } +} + +func TestExtensionDescsWithMissingExtensions(t *testing.T) { + msg := &pb.MyMessage{Count: proto.Int32(0)} + extdesc1 := pb.E_Ext_More + if descs, err := proto.ExtensionDescs(msg); len(descs) != 0 || err != nil { + t.Errorf("proto.ExtensionDescs: got %d descs, error %v; want 0, nil", len(descs), err) + } + + ext1 := &pb.Ext{} + if err := proto.SetExtension(msg, extdesc1, ext1); err != nil { + t.Fatalf("Could not set ext1: %s", err) + } + extdesc2 := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 123456789, + Name: "a.b", + Tag: "varint,123456789,opt", + } + ext2 := proto.Bool(false) + if err := proto.SetExtension(msg, extdesc2, ext2); err != nil { + t.Fatalf("Could not set ext2: %s", err) + } + + b, err := proto.Marshal(msg) + if err != nil { + t.Fatalf("Could not marshal msg: %v", err) + } + if err := proto.Unmarshal(b, msg); err != nil { + t.Fatalf("Could not unmarshal into msg: %v", err) + } + + descs, err := proto.ExtensionDescs(msg) + if err != nil { + t.Fatalf("proto.ExtensionDescs: got error %v", err) + } + sortExtDescs(descs) + wantDescs := []*proto.ExtensionDesc{extdesc1, &proto.ExtensionDesc{Field: extdesc2.Field}} + if !reflect.DeepEqual(descs, wantDescs) { + t.Errorf("proto.ExtensionDescs(msg) sorted extension ids: got %+v, want %+v", descs, wantDescs) + } +} + +type ExtensionDescSlice []*proto.ExtensionDesc + +func (s ExtensionDescSlice) Len() int { return len(s) } +func (s ExtensionDescSlice) Less(i, j int) bool { return s[i].Field < s[j].Field } +func (s ExtensionDescSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func sortExtDescs(s []*proto.ExtensionDesc) { + sort.Sort(ExtensionDescSlice(s)) +} + +func TestGetExtensionStability(t *testing.T) { + check := func(m *pb.MyMessage) bool { + ext1, err := proto.GetExtension(m, pb.E_Ext_More) + if err != nil { + t.Fatalf("GetExtension() failed: %s", err) + } + ext2, err := proto.GetExtension(m, pb.E_Ext_More) + if err != nil { + t.Fatalf("GetExtension() failed: %s", err) + } + return ext1 == ext2 + } + msg := &pb.MyMessage{Count: proto.Int32(4)} + ext0 := &pb.Ext{} + if err := proto.SetExtension(msg, pb.E_Ext_More, ext0); err != nil { + t.Fatalf("Could not 
set ext1: %s", ext0) + } + if !check(msg) { + t.Errorf("GetExtension() not stable before marshaling") + } + bb, err := proto.Marshal(msg) + if err != nil { + t.Fatalf("Marshal() failed: %s", err) + } + msg1 := &pb.MyMessage{} + err = proto.Unmarshal(bb, msg1) + if err != nil { + t.Fatalf("Unmarshal() failed: %s", err) + } + if !check(msg1) { + t.Errorf("GetExtension() not stable after unmarshaling") + } +} + +func TestGetExtensionDefaults(t *testing.T) { + var setFloat64 float64 = 1 + var setFloat32 float32 = 2 + var setInt32 int32 = 3 + var setInt64 int64 = 4 + var setUint32 uint32 = 5 + var setUint64 uint64 = 6 + var setBool = true + var setBool2 = false + var setString = "Goodnight string" + var setBytes = []byte("Goodnight bytes") + var setEnum = pb.DefaultsMessage_TWO + + type testcase struct { + ext *proto.ExtensionDesc // Extension we are testing. + want interface{} // Expected value of extension, or nil (meaning that GetExtension will fail). + def interface{} // Expected value of extension after ClearExtension(). + } + tests := []testcase{ + {pb.E_NoDefaultDouble, setFloat64, nil}, + {pb.E_NoDefaultFloat, setFloat32, nil}, + {pb.E_NoDefaultInt32, setInt32, nil}, + {pb.E_NoDefaultInt64, setInt64, nil}, + {pb.E_NoDefaultUint32, setUint32, nil}, + {pb.E_NoDefaultUint64, setUint64, nil}, + {pb.E_NoDefaultSint32, setInt32, nil}, + {pb.E_NoDefaultSint64, setInt64, nil}, + {pb.E_NoDefaultFixed32, setUint32, nil}, + {pb.E_NoDefaultFixed64, setUint64, nil}, + {pb.E_NoDefaultSfixed32, setInt32, nil}, + {pb.E_NoDefaultSfixed64, setInt64, nil}, + {pb.E_NoDefaultBool, setBool, nil}, + {pb.E_NoDefaultBool, setBool2, nil}, + {pb.E_NoDefaultString, setString, nil}, + {pb.E_NoDefaultBytes, setBytes, nil}, + {pb.E_NoDefaultEnum, setEnum, nil}, + {pb.E_DefaultDouble, setFloat64, float64(3.1415)}, + {pb.E_DefaultFloat, setFloat32, float32(3.14)}, + {pb.E_DefaultInt32, setInt32, int32(42)}, + {pb.E_DefaultInt64, setInt64, int64(43)}, + {pb.E_DefaultUint32, setUint32, uint32(44)}, + {pb.E_DefaultUint64, setUint64, uint64(45)}, + {pb.E_DefaultSint32, setInt32, int32(46)}, + {pb.E_DefaultSint64, setInt64, int64(47)}, + {pb.E_DefaultFixed32, setUint32, uint32(48)}, + {pb.E_DefaultFixed64, setUint64, uint64(49)}, + {pb.E_DefaultSfixed32, setInt32, int32(50)}, + {pb.E_DefaultSfixed64, setInt64, int64(51)}, + {pb.E_DefaultBool, setBool, true}, + {pb.E_DefaultBool, setBool2, true}, + {pb.E_DefaultString, setString, "Hello, string"}, + {pb.E_DefaultBytes, setBytes, []byte("Hello, bytes")}, + {pb.E_DefaultEnum, setEnum, pb.DefaultsMessage_ONE}, + } + + checkVal := func(test testcase, msg *pb.DefaultsMessage, valWant interface{}) error { + val, err := proto.GetExtension(msg, test.ext) + if err != nil { + if valWant != nil { + return fmt.Errorf("GetExtension(): %s", err) + } + if want := proto.ErrMissingExtension; err != want { + return fmt.Errorf("Unexpected error: got %v, want %v", err, want) + } + return nil + } + + // All proto2 extension values are either a pointer to a value or a slice of values. + ty := reflect.TypeOf(val) + tyWant := reflect.TypeOf(test.ext.ExtensionType) + if got, want := ty, tyWant; got != want { + return fmt.Errorf("unexpected reflect.TypeOf(): got %v want %v", got, want) + } + tye := ty.Elem() + tyeWant := tyWant.Elem() + if got, want := tye, tyeWant; got != want { + return fmt.Errorf("unexpected reflect.TypeOf().Elem(): got %v want %v", got, want) + } + + // Check the name of the type of the value. + // If it is an enum it will be type int32 with the name of the enum. 
+ if got, want := tye.Name(), tye.Name(); got != want { + return fmt.Errorf("unexpected reflect.TypeOf().Elem().Name(): got %v want %v", got, want) + } + + // Check that value is what we expect. + // If we have a pointer in val, get the value it points to. + valExp := val + if ty.Kind() == reflect.Ptr { + valExp = reflect.ValueOf(val).Elem().Interface() + } + if got, want := valExp, valWant; !reflect.DeepEqual(got, want) { + return fmt.Errorf("unexpected reflect.DeepEqual(): got %v want %v", got, want) + } + + return nil + } + + setTo := func(test testcase) interface{} { + setTo := reflect.ValueOf(test.want) + if typ := reflect.TypeOf(test.ext.ExtensionType); typ.Kind() == reflect.Ptr { + setTo = reflect.New(typ).Elem() + setTo.Set(reflect.New(setTo.Type().Elem())) + setTo.Elem().Set(reflect.ValueOf(test.want)) + } + return setTo.Interface() + } + + for _, test := range tests { + msg := &pb.DefaultsMessage{} + name := test.ext.Name + + // Check the initial value. + if err := checkVal(test, msg, test.def); err != nil { + t.Errorf("%s: %v", name, err) + } + + // Set the per-type value and check value. + name = fmt.Sprintf("%s (set to %T %v)", name, test.want, test.want) + if err := proto.SetExtension(msg, test.ext, setTo(test)); err != nil { + t.Errorf("%s: SetExtension(): %v", name, err) + continue + } + if err := checkVal(test, msg, test.want); err != nil { + t.Errorf("%s: %v", name, err) + continue + } + + // Set and check the value. + name += " (cleared)" + proto.ClearExtension(msg, test.ext) + if err := checkVal(test, msg, test.def); err != nil { + t.Errorf("%s: %v", name, err) + } + } +} + +func TestExtensionsRoundTrip(t *testing.T) { + msg := &pb.MyMessage{} + ext1 := &pb.Ext{ + Data: proto.String("hi"), + } + ext2 := &pb.Ext{ + Data: proto.String("there"), + } + exists := proto.HasExtension(msg, pb.E_Ext_More) + if exists { + t.Error("Extension More present unexpectedly") + } + if err := proto.SetExtension(msg, pb.E_Ext_More, ext1); err != nil { + t.Error(err) + } + if err := proto.SetExtension(msg, pb.E_Ext_More, ext2); err != nil { + t.Error(err) + } + e, err := proto.GetExtension(msg, pb.E_Ext_More) + if err != nil { + t.Error(err) + } + x, ok := e.(*pb.Ext) + if !ok { + t.Errorf("e has type %T, expected testdata.Ext", e) + } else if *x.Data != "there" { + t.Errorf("SetExtension failed to overwrite, got %+v, not 'there'", x) + } + proto.ClearExtension(msg, pb.E_Ext_More) + if _, err = proto.GetExtension(msg, pb.E_Ext_More); err != proto.ErrMissingExtension { + t.Errorf("got %v, expected ErrMissingExtension", e) + } + if _, err := proto.GetExtension(msg, pb.E_X215); err == nil { + t.Error("expected bad extension error, got nil") + } + if err := proto.SetExtension(msg, pb.E_X215, 12); err == nil { + t.Error("expected extension err") + } + if err := proto.SetExtension(msg, pb.E_Ext_More, 12); err == nil { + t.Error("expected some sort of type mismatch error, got nil") + } +} + +func TestNilExtension(t *testing.T) { + msg := &pb.MyMessage{ + Count: proto.Int32(1), + } + if err := proto.SetExtension(msg, pb.E_Ext_Text, proto.String("hello")); err != nil { + t.Fatal(err) + } + if err := proto.SetExtension(msg, pb.E_Ext_More, (*pb.Ext)(nil)); err == nil { + t.Error("expected SetExtension to fail due to a nil extension") + } else if want := "proto: SetExtension called with nil value of type *testdata.Ext"; err.Error() != want { + t.Errorf("expected error %v, got %v", want, err) + } + // Note: if the behavior of Marshal is ever changed to ignore nil extensions, update + // this test to 
verify that E_Ext_Text is properly propagated through marshal->unmarshal. +} + +func TestMarshalUnmarshalRepeatedExtension(t *testing.T) { + // Add a repeated extension to the result. + tests := []struct { + name string + ext []*pb.ComplexExtension + }{ + { + "two fields", + []*pb.ComplexExtension{ + {First: proto.Int32(7)}, + {Second: proto.Int32(11)}, + }, + }, + { + "repeated field", + []*pb.ComplexExtension{ + {Third: []int32{1000}}, + {Third: []int32{2000}}, + }, + }, + { + "two fields and repeated field", + []*pb.ComplexExtension{ + {Third: []int32{1000}}, + {First: proto.Int32(9)}, + {Second: proto.Int32(21)}, + {Third: []int32{2000}}, + }, + }, + } + for _, test := range tests { + // Marshal message with a repeated extension. + msg1 := new(pb.OtherMessage) + err := proto.SetExtension(msg1, pb.E_RComplex, test.ext) + if err != nil { + t.Fatalf("[%s] Error setting extension: %v", test.name, err) + } + b, err := proto.Marshal(msg1) + if err != nil { + t.Fatalf("[%s] Error marshaling message: %v", test.name, err) + } + + // Unmarshal and read the merged proto. + msg2 := new(pb.OtherMessage) + err = proto.Unmarshal(b, msg2) + if err != nil { + t.Fatalf("[%s] Error unmarshaling message: %v", test.name, err) + } + e, err := proto.GetExtension(msg2, pb.E_RComplex) + if err != nil { + t.Fatalf("[%s] Error getting extension: %v", test.name, err) + } + ext := e.([]*pb.ComplexExtension) + if ext == nil { + t.Fatalf("[%s] Invalid extension", test.name) + } + if !reflect.DeepEqual(ext, test.ext) { + t.Errorf("[%s] Wrong value for ComplexExtension: got: %v want: %v\n", test.name, ext, test.ext) + } + } +} + +func TestUnmarshalRepeatingNonRepeatedExtension(t *testing.T) { + // We may see multiple instances of the same extension in the wire + // format. For example, the proto compiler may encode custom options in + // this way. Here, we verify that we merge the extensions together. + tests := []struct { + name string + ext []*pb.ComplexExtension + }{ + { + "two fields", + []*pb.ComplexExtension{ + {First: proto.Int32(7)}, + {Second: proto.Int32(11)}, + }, + }, + { + "repeated field", + []*pb.ComplexExtension{ + {Third: []int32{1000}}, + {Third: []int32{2000}}, + }, + }, + { + "two fields and repeated field", + []*pb.ComplexExtension{ + {Third: []int32{1000}}, + {First: proto.Int32(9)}, + {Second: proto.Int32(21)}, + {Third: []int32{2000}}, + }, + }, + } + for _, test := range tests { + var buf bytes.Buffer + var want pb.ComplexExtension + + // Generate a serialized representation of a repeated extension + // by catenating bytes together. + for i, e := range test.ext { + // Merge to create the wanted proto. + proto.Merge(&want, e) + + // serialize the message + msg := new(pb.OtherMessage) + err := proto.SetExtension(msg, pb.E_Complex, e) + if err != nil { + t.Fatalf("[%s] Error setting extension %d: %v", test.name, i, err) + } + b, err := proto.Marshal(msg) + if err != nil { + t.Fatalf("[%s] Error marshaling message %d: %v", test.name, i, err) + } + buf.Write(b) + } + + // Unmarshal and read the merged proto. 
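+		// At this point buf holds several OtherMessage encodings written
+		// back to back; decoding the concatenation must give the same
+		// result as decoding a single message whose extension field was
+		// merged, which is what the checks below verify against want.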
+ msg2 := new(pb.OtherMessage) + err := proto.Unmarshal(buf.Bytes(), msg2) + if err != nil { + t.Fatalf("[%s] Error unmarshaling message: %v", test.name, err) + } + e, err := proto.GetExtension(msg2, pb.E_Complex) + if err != nil { + t.Fatalf("[%s] Error getting extension: %v", test.name, err) + } + ext := e.(*pb.ComplexExtension) + if ext == nil { + t.Fatalf("[%s] Invalid extension", test.name) + } + if !reflect.DeepEqual(*ext, want) { + t.Errorf("[%s] Wrong value for ComplexExtension: got: %s want: %s\n", test.name, ext, want) + } + } +} + +func TestClearAllExtensions(t *testing.T) { + // unregistered extension + desc := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 101010100, + Name: "emptyextension", + Tag: "varint,0,opt", + } + m := &pb.MyMessage{} + if proto.HasExtension(m, desc) { + t.Errorf("proto.HasExtension(%s): got true, want false", proto.MarshalTextString(m)) + } + if err := proto.SetExtension(m, desc, proto.Bool(true)); err != nil { + t.Errorf("proto.SetExtension(m, desc, true): got error %q, want nil", err) + } + if !proto.HasExtension(m, desc) { + t.Errorf("proto.HasExtension(%s): got false, want true", proto.MarshalTextString(m)) + } + proto.ClearAllExtensions(m) + if proto.HasExtension(m, desc) { + t.Errorf("proto.HasExtension(%s): got true, want false", proto.MarshalTextString(m)) + } +} + +func TestMarshalRace(t *testing.T) { + // unregistered extension + desc := &proto.ExtensionDesc{ + ExtendedType: (*pb.MyMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 101010100, + Name: "emptyextension", + Tag: "varint,0,opt", + } + + m := &pb.MyMessage{Count: proto.Int32(4)} + if err := proto.SetExtension(m, desc, proto.Bool(true)); err != nil { + t.Errorf("proto.SetExtension(m, desc, true): got error %q, want nil", err) + } + + var g errgroup.Group + for n := 3; n > 0; n-- { + g.Go(func() error { + _, err := proto.Marshal(m) + return err + }) + } + if err := g.Wait(); err != nil { + t.Fatal(err) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/lib.go b/vendor/github.com/golang/protobuf/proto/lib.go new file mode 100644 index 000000000..1c225504a --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/lib.go @@ -0,0 +1,897 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package proto converts data structures to and from the wire format of +protocol buffers. It works in concert with the Go source code generated +for .proto files by the protocol compiler. + +A summary of the properties of the protocol buffer interface +for a protocol buffer variable v: + + - Names are turned from camel_case to CamelCase for export. + - There are no methods on v to set fields; just treat + them as structure fields. + - There are getters that return a field's value if set, + and return the field's default value if unset. + The getters work even if the receiver is a nil message. + - The zero value for a struct is its correct initialization state. + All desired fields must be set before marshaling. + - A Reset() method will restore a protobuf struct to its zero state. + - Non-repeated fields are pointers to the values; nil means unset. + That is, optional or required field int32 f becomes F *int32. + - Repeated fields are slices. + - Helper functions are available to aid the setting of fields. + msg.Foo = proto.String("hello") // set field + - Constants are defined to hold the default values of all fields that + have them. They have the form Default_StructName_FieldName. + Because the getter methods handle defaulted values, + direct use of these constants should be rare. + - Enums are given type names and maps from names to values. + Enum values are prefixed by the enclosing message's name, or by the + enum's type name if it is a top-level enum. Enum types have a String + method, and a Enum method to assist in message construction. + - Nested messages, groups and enums have type names prefixed with the name of + the surrounding message type. + - Extensions are given descriptor names that start with E_, + followed by an underscore-delimited list of the nested messages + that contain it (if any) followed by the CamelCased name of the + extension field itself. HasExtension, ClearExtension, GetExtension + and SetExtension are functions for manipulating extensions. + - Oneof field sets are given a single field in their message, + with distinguished wrapper types for each possible field value. + - Marshal and Unmarshal are functions to encode and decode the wire format. + +When the .proto file specifies `syntax="proto3"`, there are some differences: + + - Non-repeated fields of non-message type are values instead of pointers. + - Enum types do not get an Enum method. + +The simplest way to describe this is to see an example. 
+Given file test.proto, containing + + package example; + + enum FOO { X = 17; } + + message Test { + required string label = 1; + optional int32 type = 2 [default=77]; + repeated int64 reps = 3; + optional group OptionalGroup = 4 { + required string RequiredField = 5; + } + oneof union { + int32 number = 6; + string name = 7; + } + } + +The resulting file, test.pb.go, is: + + package example + + import proto "github.com/golang/protobuf/proto" + import math "math" + + type FOO int32 + const ( + FOO_X FOO = 17 + ) + var FOO_name = map[int32]string{ + 17: "X", + } + var FOO_value = map[string]int32{ + "X": 17, + } + + func (x FOO) Enum() *FOO { + p := new(FOO) + *p = x + return p + } + func (x FOO) String() string { + return proto.EnumName(FOO_name, int32(x)) + } + func (x *FOO) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FOO_value, data) + if err != nil { + return err + } + *x = FOO(value) + return nil + } + + type Test struct { + Label *string `protobuf:"bytes,1,req,name=label" json:"label,omitempty"` + Type *int32 `protobuf:"varint,2,opt,name=type,def=77" json:"type,omitempty"` + Reps []int64 `protobuf:"varint,3,rep,name=reps" json:"reps,omitempty"` + Optionalgroup *Test_OptionalGroup `protobuf:"group,4,opt,name=OptionalGroup" json:"optionalgroup,omitempty"` + // Types that are valid to be assigned to Union: + // *Test_Number + // *Test_Name + Union isTest_Union `protobuf_oneof:"union"` + XXX_unrecognized []byte `json:"-"` + } + func (m *Test) Reset() { *m = Test{} } + func (m *Test) String() string { return proto.CompactTextString(m) } + func (*Test) ProtoMessage() {} + + type isTest_Union interface { + isTest_Union() + } + + type Test_Number struct { + Number int32 `protobuf:"varint,6,opt,name=number"` + } + type Test_Name struct { + Name string `protobuf:"bytes,7,opt,name=name"` + } + + func (*Test_Number) isTest_Union() {} + func (*Test_Name) isTest_Union() {} + + func (m *Test) GetUnion() isTest_Union { + if m != nil { + return m.Union + } + return nil + } + const Default_Test_Type int32 = 77 + + func (m *Test) GetLabel() string { + if m != nil && m.Label != nil { + return *m.Label + } + return "" + } + + func (m *Test) GetType() int32 { + if m != nil && m.Type != nil { + return *m.Type + } + return Default_Test_Type + } + + func (m *Test) GetOptionalgroup() *Test_OptionalGroup { + if m != nil { + return m.Optionalgroup + } + return nil + } + + type Test_OptionalGroup struct { + RequiredField *string `protobuf:"bytes,5,req" json:"RequiredField,omitempty"` + } + func (m *Test_OptionalGroup) Reset() { *m = Test_OptionalGroup{} } + func (m *Test_OptionalGroup) String() string { return proto.CompactTextString(m) } + + func (m *Test_OptionalGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" + } + + func (m *Test) GetNumber() int32 { + if x, ok := m.GetUnion().(*Test_Number); ok { + return x.Number + } + return 0 + } + + func (m *Test) GetName() string { + if x, ok := m.GetUnion().(*Test_Name); ok { + return x.Name + } + return "" + } + + func init() { + proto.RegisterEnum("example.FOO", FOO_name, FOO_value) + } + +To create and play with a Test object: + + package main + + import ( + "log" + + "github.com/golang/protobuf/proto" + pb "./example.pb" + ) + + func main() { + test := &pb.Test{ + Label: proto.String("hello"), + Type: proto.Int32(17), + Reps: []int64{1, 2, 3}, + Optionalgroup: &pb.Test_OptionalGroup{ + RequiredField: proto.String("good bye"), + }, + Union: &pb.Test_Name{"fred"}, + 
} + data, err := proto.Marshal(test) + if err != nil { + log.Fatal("marshaling error: ", err) + } + newTest := &pb.Test{} + err = proto.Unmarshal(data, newTest) + if err != nil { + log.Fatal("unmarshaling error: ", err) + } + // Now test and newTest contain the same data. + if test.GetLabel() != newTest.GetLabel() { + log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel()) + } + // Use a type switch to determine which oneof was set. + switch u := test.Union.(type) { + case *pb.Test_Number: // u.Number contains the number. + case *pb.Test_Name: // u.Name contains the string. + } + // etc. + } +*/ +package proto + +import ( + "encoding/json" + "fmt" + "log" + "reflect" + "sort" + "strconv" + "sync" +) + +// Message is implemented by generated protocol buffer messages. +type Message interface { + Reset() + String() string + ProtoMessage() +} + +// Stats records allocation details about the protocol buffer encoders +// and decoders. Useful for tuning the library itself. +type Stats struct { + Emalloc uint64 // mallocs in encode + Dmalloc uint64 // mallocs in decode + Encode uint64 // number of encodes + Decode uint64 // number of decodes + Chit uint64 // number of cache hits + Cmiss uint64 // number of cache misses + Size uint64 // number of sizes +} + +// Set to true to enable stats collection. +const collectStats = false + +var stats Stats + +// GetStats returns a copy of the global Stats structure. +func GetStats() Stats { return stats } + +// A Buffer is a buffer manager for marshaling and unmarshaling +// protocol buffers. It may be reused between invocations to +// reduce memory usage. It is not necessary to use a Buffer; +// the global functions Marshal and Unmarshal create a +// temporary Buffer and are fine for most applications. +type Buffer struct { + buf []byte // encode/decode byte stream + index int // read point + + // pools of basic types to amortize allocation. + bools []bool + uint32s []uint32 + uint64s []uint64 + + // extra pools, only used with pointer_reflect.go + int32s []int32 + int64s []int64 + float32s []float32 + float64s []float64 +} + +// NewBuffer allocates a new Buffer and initializes its internal data to +// the contents of the argument slice. +func NewBuffer(e []byte) *Buffer { + return &Buffer{buf: e} +} + +// Reset resets the Buffer, ready for marshaling a new protocol buffer. +func (p *Buffer) Reset() { + p.buf = p.buf[0:0] // for reading/writing + p.index = 0 // for reading +} + +// SetBuf replaces the internal buffer with the slice, +// ready for unmarshaling the contents of the slice. +func (p *Buffer) SetBuf(s []byte) { + p.buf = s + p.index = 0 +} + +// Bytes returns the contents of the Buffer. +func (p *Buffer) Bytes() []byte { return p.buf } + +/* + * Helper routines for simplifying the creation of optional fields of basic type. + */ + +// Bool is a helper routine that allocates a new bool value +// to store v and returns a pointer to it. +func Bool(v bool) *bool { + return &v +} + +// Int32 is a helper routine that allocates a new int32 value +// to store v and returns a pointer to it. +func Int32(v int32) *int32 { + return &v +} + +// Int is a helper routine that allocates a new int32 value +// to store v and returns a pointer to it, but unlike Int32 +// its argument value is an int. +func Int(v int) *int32 { + p := new(int32) + *p = int32(v) + return p +} + +// Int64 is a helper routine that allocates a new int64 value +// to store v and returns a pointer to it. 
+func Int64(v int64) *int64 { + return &v +} + +// Float32 is a helper routine that allocates a new float32 value +// to store v and returns a pointer to it. +func Float32(v float32) *float32 { + return &v +} + +// Float64 is a helper routine that allocates a new float64 value +// to store v and returns a pointer to it. +func Float64(v float64) *float64 { + return &v +} + +// Uint32 is a helper routine that allocates a new uint32 value +// to store v and returns a pointer to it. +func Uint32(v uint32) *uint32 { + return &v +} + +// Uint64 is a helper routine that allocates a new uint64 value +// to store v and returns a pointer to it. +func Uint64(v uint64) *uint64 { + return &v +} + +// String is a helper routine that allocates a new string value +// to store v and returns a pointer to it. +func String(v string) *string { + return &v +} + +// EnumName is a helper function to simplify printing protocol buffer enums +// by name. Given an enum map and a value, it returns a useful string. +func EnumName(m map[int32]string, v int32) string { + s, ok := m[v] + if ok { + return s + } + return strconv.Itoa(int(v)) +} + +// UnmarshalJSONEnum is a helper function to simplify recovering enum int values +// from their JSON-encoded representation. Given a map from the enum's symbolic +// names to its int values, and a byte buffer containing the JSON-encoded +// value, it returns an int32 that can be cast to the enum type by the caller. +// +// The function can deal with both JSON representations, numeric and symbolic. +func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) { + if data[0] == '"' { + // New style: enums are strings. + var repr string + if err := json.Unmarshal(data, &repr); err != nil { + return -1, err + } + val, ok := m[repr] + if !ok { + return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr) + } + return val, nil + } + // Old style: enums are ints. + var val int32 + if err := json.Unmarshal(data, &val); err != nil { + return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName) + } + return val, nil +} + +// DebugPrint dumps the encoded data in b in a debugging format with a header +// including the string s. Used in testing but made available for general debugging. 
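+//
+// A rough usage sketch (assuming msg is any generated Message value):
+//
+//	data, err := Marshal(msg)
+//	if err != nil {
+//		// handle the marshal error
+//	}
+//	new(Buffer).DebugPrint("msg wire format", data)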
+func (p *Buffer) DebugPrint(s string, b []byte) { + var u uint64 + + obuf := p.buf + index := p.index + p.buf = b + p.index = 0 + depth := 0 + + fmt.Printf("\n--- %s ---\n", s) + +out: + for { + for i := 0; i < depth; i++ { + fmt.Print(" ") + } + + index := p.index + if index == len(p.buf) { + break + } + + op, err := p.DecodeVarint() + if err != nil { + fmt.Printf("%3d: fetching op err %v\n", index, err) + break out + } + tag := op >> 3 + wire := op & 7 + + switch wire { + default: + fmt.Printf("%3d: t=%3d unknown wire=%d\n", + index, tag, wire) + break out + + case WireBytes: + var r []byte + + r, err = p.DecodeRawBytes(false) + if err != nil { + break out + } + fmt.Printf("%3d: t=%3d bytes [%d]", index, tag, len(r)) + if len(r) <= 6 { + for i := 0; i < len(r); i++ { + fmt.Printf(" %.2x", r[i]) + } + } else { + for i := 0; i < 3; i++ { + fmt.Printf(" %.2x", r[i]) + } + fmt.Printf(" ..") + for i := len(r) - 3; i < len(r); i++ { + fmt.Printf(" %.2x", r[i]) + } + } + fmt.Printf("\n") + + case WireFixed32: + u, err = p.DecodeFixed32() + if err != nil { + fmt.Printf("%3d: t=%3d fix32 err %v\n", index, tag, err) + break out + } + fmt.Printf("%3d: t=%3d fix32 %d\n", index, tag, u) + + case WireFixed64: + u, err = p.DecodeFixed64() + if err != nil { + fmt.Printf("%3d: t=%3d fix64 err %v\n", index, tag, err) + break out + } + fmt.Printf("%3d: t=%3d fix64 %d\n", index, tag, u) + + case WireVarint: + u, err = p.DecodeVarint() + if err != nil { + fmt.Printf("%3d: t=%3d varint err %v\n", index, tag, err) + break out + } + fmt.Printf("%3d: t=%3d varint %d\n", index, tag, u) + + case WireStartGroup: + fmt.Printf("%3d: t=%3d start\n", index, tag) + depth++ + + case WireEndGroup: + depth-- + fmt.Printf("%3d: t=%3d end\n", index, tag) + } + } + + if depth != 0 { + fmt.Printf("%3d: start-end not balanced %d\n", p.index, depth) + } + fmt.Printf("\n") + + p.buf = obuf + p.index = index +} + +// SetDefaults sets unset protocol buffer fields to their default values. +// It only modifies fields that are both unset and have defined defaults. +// It recursively sets default values in any non-nil sub-messages. +func SetDefaults(pb Message) { + setDefaults(reflect.ValueOf(pb), true, false) +} + +// v is a pointer to a struct. +func setDefaults(v reflect.Value, recur, zeros bool) { + v = v.Elem() + + defaultMu.RLock() + dm, ok := defaults[v.Type()] + defaultMu.RUnlock() + if !ok { + dm = buildDefaultMessage(v.Type()) + defaultMu.Lock() + defaults[v.Type()] = dm + defaultMu.Unlock() + } + + for _, sf := range dm.scalars { + f := v.Field(sf.index) + if !f.IsNil() { + // field already set + continue + } + dv := sf.value + if dv == nil && !zeros { + // no explicit default, and don't want to set zeros + continue + } + fptr := f.Addr().Interface() // **T + // TODO: Consider batching the allocations we do here. 
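+		// Each case below allocates fresh storage for the unset field and
+		// fills it with the declared default, or leaves it at the zero
+		// value when no default was declared and zeros was requested;
+		// fields that were already set were skipped earlier in this loop.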
+ switch sf.kind { + case reflect.Bool: + b := new(bool) + if dv != nil { + *b = dv.(bool) + } + *(fptr.(**bool)) = b + case reflect.Float32: + f := new(float32) + if dv != nil { + *f = dv.(float32) + } + *(fptr.(**float32)) = f + case reflect.Float64: + f := new(float64) + if dv != nil { + *f = dv.(float64) + } + *(fptr.(**float64)) = f + case reflect.Int32: + // might be an enum + if ft := f.Type(); ft != int32PtrType { + // enum + f.Set(reflect.New(ft.Elem())) + if dv != nil { + f.Elem().SetInt(int64(dv.(int32))) + } + } else { + // int32 field + i := new(int32) + if dv != nil { + *i = dv.(int32) + } + *(fptr.(**int32)) = i + } + case reflect.Int64: + i := new(int64) + if dv != nil { + *i = dv.(int64) + } + *(fptr.(**int64)) = i + case reflect.String: + s := new(string) + if dv != nil { + *s = dv.(string) + } + *(fptr.(**string)) = s + case reflect.Uint8: + // exceptional case: []byte + var b []byte + if dv != nil { + db := dv.([]byte) + b = make([]byte, len(db)) + copy(b, db) + } else { + b = []byte{} + } + *(fptr.(*[]byte)) = b + case reflect.Uint32: + u := new(uint32) + if dv != nil { + *u = dv.(uint32) + } + *(fptr.(**uint32)) = u + case reflect.Uint64: + u := new(uint64) + if dv != nil { + *u = dv.(uint64) + } + *(fptr.(**uint64)) = u + default: + log.Printf("proto: can't set default for field %v (sf.kind=%v)", f, sf.kind) + } + } + + for _, ni := range dm.nested { + f := v.Field(ni) + // f is *T or []*T or map[T]*T + switch f.Kind() { + case reflect.Ptr: + if f.IsNil() { + continue + } + setDefaults(f, recur, zeros) + + case reflect.Slice: + for i := 0; i < f.Len(); i++ { + e := f.Index(i) + if e.IsNil() { + continue + } + setDefaults(e, recur, zeros) + } + + case reflect.Map: + for _, k := range f.MapKeys() { + e := f.MapIndex(k) + if e.IsNil() { + continue + } + setDefaults(e, recur, zeros) + } + } + } +} + +var ( + // defaults maps a protocol buffer struct type to a slice of the fields, + // with its scalar fields set to their proto-declared non-zero default values. + defaultMu sync.RWMutex + defaults = make(map[reflect.Type]defaultMessage) + + int32PtrType = reflect.TypeOf((*int32)(nil)) +) + +// defaultMessage represents information about the default values of a message. +type defaultMessage struct { + scalars []scalarField + nested []int // struct field index of nested messages +} + +type scalarField struct { + index int // struct field index + kind reflect.Kind // element type (the T in *T or []T) + value interface{} // the proto-declared default value, or nil +} + +// t is a struct type. +func buildDefaultMessage(t reflect.Type) (dm defaultMessage) { + sprop := GetProperties(t) + for _, prop := range sprop.Prop { + fi, ok := sprop.decoderTags.get(prop.Tag) + if !ok { + // XXX_unrecognized + continue + } + ft := t.Field(fi).Type + + sf, nested, err := fieldDefault(ft, prop) + switch { + case err != nil: + log.Print(err) + case nested: + dm.nested = append(dm.nested, fi) + case sf != nil: + sf.index = fi + dm.scalars = append(dm.scalars, *sf) + } + } + + return dm +} + +// fieldDefault returns the scalarField for field type ft. +// sf will be nil if the field can not have a default. +// nestedMessage will be true if this is a nested message. +// Note that sf.index is not set on return. 
+func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMessage bool, err error) { + var canHaveDefault bool + switch ft.Kind() { + case reflect.Ptr: + if ft.Elem().Kind() == reflect.Struct { + nestedMessage = true + } else { + canHaveDefault = true // proto2 scalar field + } + + case reflect.Slice: + switch ft.Elem().Kind() { + case reflect.Ptr: + nestedMessage = true // repeated message + case reflect.Uint8: + canHaveDefault = true // bytes field + } + + case reflect.Map: + if ft.Elem().Kind() == reflect.Ptr { + nestedMessage = true // map with message values + } + } + + if !canHaveDefault { + if nestedMessage { + return nil, true, nil + } + return nil, false, nil + } + + // We now know that ft is a pointer or slice. + sf = &scalarField{kind: ft.Elem().Kind()} + + // scalar fields without defaults + if !prop.HasDefault { + return sf, false, nil + } + + // a scalar field: either *T or []byte + switch ft.Elem().Kind() { + case reflect.Bool: + x, err := strconv.ParseBool(prop.Default) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default bool %q: %v", prop.Default, err) + } + sf.value = x + case reflect.Float32: + x, err := strconv.ParseFloat(prop.Default, 32) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default float32 %q: %v", prop.Default, err) + } + sf.value = float32(x) + case reflect.Float64: + x, err := strconv.ParseFloat(prop.Default, 64) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default float64 %q: %v", prop.Default, err) + } + sf.value = x + case reflect.Int32: + x, err := strconv.ParseInt(prop.Default, 10, 32) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default int32 %q: %v", prop.Default, err) + } + sf.value = int32(x) + case reflect.Int64: + x, err := strconv.ParseInt(prop.Default, 10, 64) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default int64 %q: %v", prop.Default, err) + } + sf.value = x + case reflect.String: + sf.value = prop.Default + case reflect.Uint8: + // []byte (not *uint8) + sf.value = []byte(prop.Default) + case reflect.Uint32: + x, err := strconv.ParseUint(prop.Default, 10, 32) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default uint32 %q: %v", prop.Default, err) + } + sf.value = uint32(x) + case reflect.Uint64: + x, err := strconv.ParseUint(prop.Default, 10, 64) + if err != nil { + return nil, false, fmt.Errorf("proto: bad default uint64 %q: %v", prop.Default, err) + } + sf.value = x + default: + return nil, false, fmt.Errorf("proto: unhandled def kind %v", ft.Elem().Kind()) + } + + return sf, false, nil +} + +// Map fields may have key types of non-float scalars, strings and enums. +// The easiest way to sort them in some deterministic order is to use fmt. +// If this turns out to be inefficient we can always consider other options, +// such as doing a Schwartzian transform. + +func mapKeys(vs []reflect.Value) sort.Interface { + s := mapKeySorter{ + vs: vs, + // default Less function: textual comparison + less: func(a, b reflect.Value) bool { + return fmt.Sprint(a.Interface()) < fmt.Sprint(b.Interface()) + }, + } + + // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps; + // numeric keys are sorted numerically. 
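+	// For example, int32 keys {3, 1, 2} are visited as 1, 2, 3, so the
+	// encoded output for a map field is the same on every Marshal call.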
+ if len(vs) == 0 { + return s + } + switch vs[0].Kind() { + case reflect.Int32, reflect.Int64: + s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() } + case reflect.Uint32, reflect.Uint64: + s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() } + } + + return s +} + +type mapKeySorter struct { + vs []reflect.Value + less func(a, b reflect.Value) bool +} + +func (s mapKeySorter) Len() int { return len(s.vs) } +func (s mapKeySorter) Swap(i, j int) { s.vs[i], s.vs[j] = s.vs[j], s.vs[i] } +func (s mapKeySorter) Less(i, j int) bool { + return s.less(s.vs[i], s.vs[j]) +} + +// isProto3Zero reports whether v is a zero proto3 value. +func isProto3Zero(v reflect.Value) bool { + switch v.Kind() { + case reflect.Bool: + return !v.Bool() + case reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint32, reflect.Uint64: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.String: + return v.String() == "" + } + return false +} + +// ProtoPackageIsVersion2 is referenced from generated protocol buffer files +// to assert that that code is compatible with this version of the proto package. +const ProtoPackageIsVersion2 = true + +// ProtoPackageIsVersion1 is referenced from generated protocol buffer files +// to assert that that code is compatible with this version of the proto package. +const ProtoPackageIsVersion1 = true diff --git a/vendor/github.com/golang/protobuf/proto/map_test.go b/vendor/github.com/golang/protobuf/proto/map_test.go new file mode 100644 index 000000000..313e87924 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/map_test.go @@ -0,0 +1,46 @@ +package proto_test + +import ( + "fmt" + "testing" + + "github.com/golang/protobuf/proto" + ppb "github.com/golang/protobuf/proto/proto3_proto" +) + +func marshalled() []byte { + m := &ppb.IntMaps{} + for i := 0; i < 1000; i++ { + m.Maps = append(m.Maps, &ppb.IntMap{ + Rtt: map[int32]int32{1: 2}, + }) + } + b, err := proto.Marshal(m) + if err != nil { + panic(fmt.Sprintf("Can't marshal %+v: %v", m, err)) + } + return b +} + +func BenchmarkConcurrentMapUnmarshal(b *testing.B) { + in := marshalled() + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + var out ppb.IntMaps + if err := proto.Unmarshal(in, &out); err != nil { + b.Errorf("Can't unmarshal ppb.IntMaps: %v", err) + } + } + }) +} + +func BenchmarkSequentialMapUnmarshal(b *testing.B) { + in := marshalled() + b.ResetTimer() + for i := 0; i < b.N; i++ { + var out ppb.IntMaps + if err := proto.Unmarshal(in, &out); err != nil { + b.Errorf("Can't unmarshal ppb.IntMaps: %v", err) + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/message_set.go b/vendor/github.com/golang/protobuf/proto/message_set.go new file mode 100644 index 000000000..fd982decd --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/message_set.go @@ -0,0 +1,311 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +/* + * Support for message sets. + */ + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "reflect" + "sort" +) + +// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. +// A message type ID is required for storing a protocol buffer in a message set. +var errNoMessageTypeID = errors.New("proto does not have a message type ID") + +// The first two types (_MessageSet_Item and messageSet) +// model what the protocol compiler produces for the following protocol message: +// message MessageSet { +// repeated group Item = 1 { +// required int32 type_id = 2; +// required string message = 3; +// }; +// } +// That is the MessageSet wire format. We can't use a proto to generate these +// because that would introduce a circular dependency between it and this package. + +type _MessageSet_Item struct { + TypeId *int32 `protobuf:"varint,2,req,name=type_id"` + Message []byte `protobuf:"bytes,3,req,name=message"` +} + +type messageSet struct { + Item []*_MessageSet_Item `protobuf:"group,1,rep"` + XXX_unrecognized []byte + // TODO: caching? +} + +// Make sure messageSet is a Message. +var _ Message = (*messageSet)(nil) + +// messageTypeIder is an interface satisfied by a protocol buffer type +// that may be stored in a MessageSet. +type messageTypeIder interface { + MessageTypeId() int32 +} + +func (ms *messageSet) find(pb Message) *_MessageSet_Item { + mti, ok := pb.(messageTypeIder) + if !ok { + return nil + } + id := mti.MessageTypeId() + for _, item := range ms.Item { + if *item.TypeId == id { + return item + } + } + return nil +} + +func (ms *messageSet) Has(pb Message) bool { + if ms.find(pb) != nil { + return true + } + return false +} + +func (ms *messageSet) Unmarshal(pb Message) error { + if item := ms.find(pb); item != nil { + return Unmarshal(item.Message, pb) + } + if _, ok := pb.(messageTypeIder); !ok { + return errNoMessageTypeID + } + return nil // TODO: return error instead? 
+} + +func (ms *messageSet) Marshal(pb Message) error { + msg, err := Marshal(pb) + if err != nil { + return err + } + if item := ms.find(pb); item != nil { + // reuse existing item + item.Message = msg + return nil + } + + mti, ok := pb.(messageTypeIder) + if !ok { + return errNoMessageTypeID + } + + mtid := mti.MessageTypeId() + ms.Item = append(ms.Item, &_MessageSet_Item{ + TypeId: &mtid, + Message: msg, + }) + return nil +} + +func (ms *messageSet) Reset() { *ms = messageSet{} } +func (ms *messageSet) String() string { return CompactTextString(ms) } +func (*messageSet) ProtoMessage() {} + +// Support for the message_set_wire_format message option. + +func skipVarint(buf []byte) []byte { + i := 0 + for ; buf[i]&0x80 != 0; i++ { + } + return buf[i+1:] +} + +// MarshalMessageSet encodes the extension map represented by m in the message set wire format. +// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option. +func MarshalMessageSet(exts interface{}) ([]byte, error) { + var m map[int32]Extension + switch exts := exts.(type) { + case *XXX_InternalExtensions: + if err := encodeExtensions(exts); err != nil { + return nil, err + } + m, _ = exts.extensionsRead() + case map[int32]Extension: + if err := encodeExtensionsMap(exts); err != nil { + return nil, err + } + m = exts + default: + return nil, errors.New("proto: not an extension map") + } + + // Sort extension IDs to provide a deterministic encoding. + // See also enc_map in encode.go. + ids := make([]int, 0, len(m)) + for id := range m { + ids = append(ids, int(id)) + } + sort.Ints(ids) + + ms := &messageSet{Item: make([]*_MessageSet_Item, 0, len(m))} + for _, id := range ids { + e := m[int32(id)] + // Remove the wire type and field number varint, as well as the length varint. + msg := skipVarint(skipVarint(e.enc)) + + ms.Item = append(ms.Item, &_MessageSet_Item{ + TypeId: Int32(int32(id)), + Message: msg, + }) + } + return Marshal(ms) +} + +// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. +// It is called by generated Unmarshal methods on protocol buffer messages with the message_set_wire_format option. +func UnmarshalMessageSet(buf []byte, exts interface{}) error { + var m map[int32]Extension + switch exts := exts.(type) { + case *XXX_InternalExtensions: + m = exts.extensionsWrite() + case map[int32]Extension: + m = exts + default: + return errors.New("proto: not an extension map") + } + + ms := new(messageSet) + if err := Unmarshal(buf, ms); err != nil { + return err + } + for _, item := range ms.Item { + id := *item.TypeId + msg := item.Message + + // Restore wire type and field number varint, plus length varint. + // Be careful to preserve duplicate items. + b := EncodeVarint(uint64(id)<<3 | WireBytes) + if ext, ok := m[id]; ok { + // Existing data; rip off the tag and length varint + // so we join the new data correctly. + // We can assume that ext.enc is set because we are unmarshaling. + o := ext.enc[len(b):] // skip wire type and field number + _, n := DecodeVarint(o) // calculate length of length varint + o = o[n:] // skip length varint + msg = append(o, msg...) // join old data and new data + } + b = append(b, EncodeVarint(uint64(len(msg)))...) + b = append(b, msg...) + + m[id] = Extension{enc: b} + } + return nil +} + +// MarshalMessageSetJSON encodes the extension map represented by m in JSON format. +// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option. 
+func MarshalMessageSetJSON(exts interface{}) ([]byte, error) { + var m map[int32]Extension + switch exts := exts.(type) { + case *XXX_InternalExtensions: + m, _ = exts.extensionsRead() + case map[int32]Extension: + m = exts + default: + return nil, errors.New("proto: not an extension map") + } + var b bytes.Buffer + b.WriteByte('{') + + // Process the map in key order for deterministic output. + ids := make([]int32, 0, len(m)) + for id := range m { + ids = append(ids, id) + } + sort.Sort(int32Slice(ids)) // int32Slice defined in text.go + + for i, id := range ids { + ext := m[id] + if i > 0 { + b.WriteByte(',') + } + + msd, ok := messageSetMap[id] + if !ok { + // Unknown type; we can't render it, so skip it. + continue + } + fmt.Fprintf(&b, `"[%s]":`, msd.name) + + x := ext.value + if x == nil { + x = reflect.New(msd.t.Elem()).Interface() + if err := Unmarshal(ext.enc, x.(Message)); err != nil { + return nil, err + } + } + d, err := json.Marshal(x) + if err != nil { + return nil, err + } + b.Write(d) + } + b.WriteByte('}') + return b.Bytes(), nil +} + +// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format. +// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option. +func UnmarshalMessageSetJSON(buf []byte, exts interface{}) error { + // Common-case fast path. + if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) { + return nil + } + + // This is fairly tricky, and it's not clear that it is needed. + return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented") +} + +// A global registry of types that can be used in a MessageSet. + +var messageSetMap = make(map[int32]messageSetDesc) + +type messageSetDesc struct { + t reflect.Type // pointer to struct + name string +} + +// RegisterMessageSetType is called from the generated code. +func RegisterMessageSetType(m Message, fieldNum int32, name string) { + messageSetMap[fieldNum] = messageSetDesc{ + t: reflect.TypeOf(m), + name: name, + } +} diff --git a/vendor/github.com/golang/protobuf/proto/message_set_test.go b/vendor/github.com/golang/protobuf/proto/message_set_test.go new file mode 100644 index 000000000..353a3ea76 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/message_set_test.go @@ -0,0 +1,66 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2014 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "bytes" + "testing" +) + +func TestUnmarshalMessageSetWithDuplicate(t *testing.T) { + // Check that a repeated message set entry will be concatenated. + in := &messageSet{ + Item: []*_MessageSet_Item{ + {TypeId: Int32(12345), Message: []byte("hoo")}, + {TypeId: Int32(12345), Message: []byte("hah")}, + }, + } + b, err := Marshal(in) + if err != nil { + t.Fatalf("Marshal: %v", err) + } + t.Logf("Marshaled bytes: %q", b) + + var extensions XXX_InternalExtensions + if err := UnmarshalMessageSet(b, &extensions); err != nil { + t.Fatalf("UnmarshalMessageSet: %v", err) + } + ext, ok := extensions.p.extensionMap[12345] + if !ok { + t.Fatalf("Didn't retrieve extension 12345; map is %v", extensions.p.extensionMap) + } + // Skip wire type/field number and length varints. + got := skipVarint(skipVarint(ext.enc)) + if want := []byte("hoohah"); !bytes.Equal(got, want) { + t.Errorf("Combined extension is %q, want %q", got, want) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go new file mode 100644 index 000000000..fb512e2e1 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go @@ -0,0 +1,484 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2012 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// +build appengine js + +// This file contains an implementation of proto field accesses using package reflect. +// It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can +// be used on App Engine. + +package proto + +import ( + "math" + "reflect" +) + +// A structPointer is a pointer to a struct. +type structPointer struct { + v reflect.Value +} + +// toStructPointer returns a structPointer equivalent to the given reflect value. +// The reflect value must itself be a pointer to a struct. +func toStructPointer(v reflect.Value) structPointer { + return structPointer{v} +} + +// IsNil reports whether p is nil. +func structPointer_IsNil(p structPointer) bool { + return p.v.IsNil() +} + +// Interface returns the struct pointer as an interface value. +func structPointer_Interface(p structPointer, _ reflect.Type) interface{} { + return p.v.Interface() +} + +// A field identifies a field in a struct, accessible from a structPointer. +// In this implementation, a field is identified by the sequence of field indices +// passed to reflect's FieldByIndex. +type field []int + +// toField returns a field equivalent to the given reflect field. +func toField(f *reflect.StructField) field { + return f.Index +} + +// invalidField is an invalid field identifier. +var invalidField = field(nil) + +// IsValid reports whether the field identifier is valid. +func (f field) IsValid() bool { return f != nil } + +// field returns the given field in the struct as a reflect value. +func structPointer_field(p structPointer, f field) reflect.Value { + // Special case: an extension map entry with a value of type T + // passes a *T to the struct-handling code with a zero field, + // expecting that it will be treated as equivalent to *struct{ X T }, + // which has the same memory layout. We have to handle that case + // specially, because reflect will panic if we call FieldByIndex on a + // non-struct. + if f == nil { + return p.v.Elem() + } + + return p.v.Elem().FieldByIndex(f) +} + +// ifield returns the given field in the struct as an interface value. +func structPointer_ifield(p structPointer, f field) interface{} { + return structPointer_field(p, f).Addr().Interface() +} + +// Bytes returns the address of a []byte field in the struct. +func structPointer_Bytes(p structPointer, f field) *[]byte { + return structPointer_ifield(p, f).(*[]byte) +} + +// BytesSlice returns the address of a [][]byte field in the struct. +func structPointer_BytesSlice(p structPointer, f field) *[][]byte { + return structPointer_ifield(p, f).(*[][]byte) +} + +// Bool returns the address of a *bool field in the struct. +func structPointer_Bool(p structPointer, f field) **bool { + return structPointer_ifield(p, f).(**bool) +} + +// BoolVal returns the address of a bool field in the struct. +func structPointer_BoolVal(p structPointer, f field) *bool { + return structPointer_ifield(p, f).(*bool) +} + +// BoolSlice returns the address of a []bool field in the struct. +func structPointer_BoolSlice(p structPointer, f field) *[]bool { + return structPointer_ifield(p, f).(*[]bool) +} + +// String returns the address of a *string field in the struct. +func structPointer_String(p structPointer, f field) **string { + return structPointer_ifield(p, f).(**string) +} + +// StringVal returns the address of a string field in the struct. +func structPointer_StringVal(p structPointer, f field) *string { + return structPointer_ifield(p, f).(*string) +} + +// StringSlice returns the address of a []string field in the struct. 
+func structPointer_StringSlice(p structPointer, f field) *[]string { + return structPointer_ifield(p, f).(*[]string) +} + +// Extensions returns the address of an extension map field in the struct. +func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { + return structPointer_ifield(p, f).(*XXX_InternalExtensions) +} + +// ExtMap returns the address of an extension map field in the struct. +func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { + return structPointer_ifield(p, f).(*map[int32]Extension) +} + +// NewAt returns the reflect.Value for a pointer to a field in the struct. +func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { + return structPointer_field(p, f).Addr() +} + +// SetStructPointer writes a *struct field in the struct. +func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { + structPointer_field(p, f).Set(q.v) +} + +// GetStructPointer reads a *struct field in the struct. +func structPointer_GetStructPointer(p structPointer, f field) structPointer { + return structPointer{structPointer_field(p, f)} +} + +// StructPointerSlice the address of a []*struct field in the struct. +func structPointer_StructPointerSlice(p structPointer, f field) structPointerSlice { + return structPointerSlice{structPointer_field(p, f)} +} + +// A structPointerSlice represents the address of a slice of pointers to structs +// (themselves messages or groups). That is, v.Type() is *[]*struct{...}. +type structPointerSlice struct { + v reflect.Value +} + +func (p structPointerSlice) Len() int { return p.v.Len() } +func (p structPointerSlice) Index(i int) structPointer { return structPointer{p.v.Index(i)} } +func (p structPointerSlice) Append(q structPointer) { + p.v.Set(reflect.Append(p.v, q.v)) +} + +var ( + int32Type = reflect.TypeOf(int32(0)) + uint32Type = reflect.TypeOf(uint32(0)) + float32Type = reflect.TypeOf(float32(0)) + int64Type = reflect.TypeOf(int64(0)) + uint64Type = reflect.TypeOf(uint64(0)) + float64Type = reflect.TypeOf(float64(0)) +) + +// A word32 represents a field of type *int32, *uint32, *float32, or *enum. +// That is, v.Type() is *int32, *uint32, *float32, or *enum and v is assignable. +type word32 struct { + v reflect.Value +} + +// IsNil reports whether p is nil. +func word32_IsNil(p word32) bool { + return p.v.IsNil() +} + +// Set sets p to point at a newly allocated word with bits set to x. +func word32_Set(p word32, o *Buffer, x uint32) { + t := p.v.Type().Elem() + switch t { + case int32Type: + if len(o.int32s) == 0 { + o.int32s = make([]int32, uint32PoolSize) + } + o.int32s[0] = int32(x) + p.v.Set(reflect.ValueOf(&o.int32s[0])) + o.int32s = o.int32s[1:] + return + case uint32Type: + if len(o.uint32s) == 0 { + o.uint32s = make([]uint32, uint32PoolSize) + } + o.uint32s[0] = x + p.v.Set(reflect.ValueOf(&o.uint32s[0])) + o.uint32s = o.uint32s[1:] + return + case float32Type: + if len(o.float32s) == 0 { + o.float32s = make([]float32, uint32PoolSize) + } + o.float32s[0] = math.Float32frombits(x) + p.v.Set(reflect.ValueOf(&o.float32s[0])) + o.float32s = o.float32s[1:] + return + } + + // must be enum + p.v.Set(reflect.New(t)) + p.v.Elem().SetInt(int64(int32(x))) +} + +// Get gets the bits pointed at by p, as a uint32. 
+func word32_Get(p word32) uint32 { + elem := p.v.Elem() + switch elem.Kind() { + case reflect.Int32: + return uint32(elem.Int()) + case reflect.Uint32: + return uint32(elem.Uint()) + case reflect.Float32: + return math.Float32bits(float32(elem.Float())) + } + panic("unreachable") +} + +// Word32 returns a reference to a *int32, *uint32, *float32, or *enum field in the struct. +func structPointer_Word32(p structPointer, f field) word32 { + return word32{structPointer_field(p, f)} +} + +// A word32Val represents a field of type int32, uint32, float32, or enum. +// That is, v.Type() is int32, uint32, float32, or enum and v is assignable. +type word32Val struct { + v reflect.Value +} + +// Set sets *p to x. +func word32Val_Set(p word32Val, x uint32) { + switch p.v.Type() { + case int32Type: + p.v.SetInt(int64(x)) + return + case uint32Type: + p.v.SetUint(uint64(x)) + return + case float32Type: + p.v.SetFloat(float64(math.Float32frombits(x))) + return + } + + // must be enum + p.v.SetInt(int64(int32(x))) +} + +// Get gets the bits pointed at by p, as a uint32. +func word32Val_Get(p word32Val) uint32 { + elem := p.v + switch elem.Kind() { + case reflect.Int32: + return uint32(elem.Int()) + case reflect.Uint32: + return uint32(elem.Uint()) + case reflect.Float32: + return math.Float32bits(float32(elem.Float())) + } + panic("unreachable") +} + +// Word32Val returns a reference to a int32, uint32, float32, or enum field in the struct. +func structPointer_Word32Val(p structPointer, f field) word32Val { + return word32Val{structPointer_field(p, f)} +} + +// A word32Slice is a slice of 32-bit values. +// That is, v.Type() is []int32, []uint32, []float32, or []enum. +type word32Slice struct { + v reflect.Value +} + +func (p word32Slice) Append(x uint32) { + n, m := p.v.Len(), p.v.Cap() + if n < m { + p.v.SetLen(n + 1) + } else { + t := p.v.Type().Elem() + p.v.Set(reflect.Append(p.v, reflect.Zero(t))) + } + elem := p.v.Index(n) + switch elem.Kind() { + case reflect.Int32: + elem.SetInt(int64(int32(x))) + case reflect.Uint32: + elem.SetUint(uint64(x)) + case reflect.Float32: + elem.SetFloat(float64(math.Float32frombits(x))) + } +} + +func (p word32Slice) Len() int { + return p.v.Len() +} + +func (p word32Slice) Index(i int) uint32 { + elem := p.v.Index(i) + switch elem.Kind() { + case reflect.Int32: + return uint32(elem.Int()) + case reflect.Uint32: + return uint32(elem.Uint()) + case reflect.Float32: + return math.Float32bits(float32(elem.Float())) + } + panic("unreachable") +} + +// Word32Slice returns a reference to a []int32, []uint32, []float32, or []enum field in the struct. +func structPointer_Word32Slice(p structPointer, f field) word32Slice { + return word32Slice{structPointer_field(p, f)} +} + +// word64 is like word32 but for 64-bit values. 
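word32_Set above (and word64_Set below) avoids one heap allocation per optional scalar by carving pointers out of scratch slices owned by the Buffer (o.int32s, o.uint32s, and so on). A self-contained sketch of that pooling trick; poolSize and the scratch type are illustrative stand-ins for the Buffer's uint32PoolSize/uint64PoolSize machinery:

package main

import "fmt"

const poolSize = 16

// scratch mimics the batch-allocation used by word32_Set: pointers are carved
// out of one slice allocation instead of calling new(int32) per field.
type scratch struct{ int32s []int32 }

func (s *scratch) newInt32(x int32) *int32 {
	if len(s.int32s) == 0 {
		s.int32s = make([]int32, poolSize)
	}
	s.int32s[0] = x
	p := &s.int32s[0]
	s.int32s = s.int32s[1:]
	return p
}

func main() {
	var s scratch
	a, b := s.newInt32(1), s.newInt32(2)
	fmt.Println(*a, *b) // 1 2
}

The point of the trick is simply to amortize allocations across the many optional scalar fields of a decoded message.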
+type word64 struct { + v reflect.Value +} + +func word64_Set(p word64, o *Buffer, x uint64) { + t := p.v.Type().Elem() + switch t { + case int64Type: + if len(o.int64s) == 0 { + o.int64s = make([]int64, uint64PoolSize) + } + o.int64s[0] = int64(x) + p.v.Set(reflect.ValueOf(&o.int64s[0])) + o.int64s = o.int64s[1:] + return + case uint64Type: + if len(o.uint64s) == 0 { + o.uint64s = make([]uint64, uint64PoolSize) + } + o.uint64s[0] = x + p.v.Set(reflect.ValueOf(&o.uint64s[0])) + o.uint64s = o.uint64s[1:] + return + case float64Type: + if len(o.float64s) == 0 { + o.float64s = make([]float64, uint64PoolSize) + } + o.float64s[0] = math.Float64frombits(x) + p.v.Set(reflect.ValueOf(&o.float64s[0])) + o.float64s = o.float64s[1:] + return + } + panic("unreachable") +} + +func word64_IsNil(p word64) bool { + return p.v.IsNil() +} + +func word64_Get(p word64) uint64 { + elem := p.v.Elem() + switch elem.Kind() { + case reflect.Int64: + return uint64(elem.Int()) + case reflect.Uint64: + return elem.Uint() + case reflect.Float64: + return math.Float64bits(elem.Float()) + } + panic("unreachable") +} + +func structPointer_Word64(p structPointer, f field) word64 { + return word64{structPointer_field(p, f)} +} + +// word64Val is like word32Val but for 64-bit values. +type word64Val struct { + v reflect.Value +} + +func word64Val_Set(p word64Val, o *Buffer, x uint64) { + switch p.v.Type() { + case int64Type: + p.v.SetInt(int64(x)) + return + case uint64Type: + p.v.SetUint(x) + return + case float64Type: + p.v.SetFloat(math.Float64frombits(x)) + return + } + panic("unreachable") +} + +func word64Val_Get(p word64Val) uint64 { + elem := p.v + switch elem.Kind() { + case reflect.Int64: + return uint64(elem.Int()) + case reflect.Uint64: + return elem.Uint() + case reflect.Float64: + return math.Float64bits(elem.Float()) + } + panic("unreachable") +} + +func structPointer_Word64Val(p structPointer, f field) word64Val { + return word64Val{structPointer_field(p, f)} +} + +type word64Slice struct { + v reflect.Value +} + +func (p word64Slice) Append(x uint64) { + n, m := p.v.Len(), p.v.Cap() + if n < m { + p.v.SetLen(n + 1) + } else { + t := p.v.Type().Elem() + p.v.Set(reflect.Append(p.v, reflect.Zero(t))) + } + elem := p.v.Index(n) + switch elem.Kind() { + case reflect.Int64: + elem.SetInt(int64(int64(x))) + case reflect.Uint64: + elem.SetUint(uint64(x)) + case reflect.Float64: + elem.SetFloat(float64(math.Float64frombits(x))) + } +} + +func (p word64Slice) Len() int { + return p.v.Len() +} + +func (p word64Slice) Index(i int) uint64 { + elem := p.v.Index(i) + switch elem.Kind() { + case reflect.Int64: + return uint64(elem.Int()) + case reflect.Uint64: + return uint64(elem.Uint()) + case reflect.Float64: + return math.Float64bits(float64(elem.Float())) + } + panic("unreachable") +} + +func structPointer_Word64Slice(p structPointer, f field) word64Slice { + return word64Slice{structPointer_field(p, f)} +} diff --git a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go new file mode 100644 index 000000000..6b5567d47 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go @@ -0,0 +1,270 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2012 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !appengine,!js + +// This file contains the implementation of the proto field accesses using package unsafe. + +package proto + +import ( + "reflect" + "unsafe" +) + +// NOTE: These type_Foo functions would more idiomatically be methods, +// but Go does not allow methods on pointer types, and we must preserve +// some pointer type for the garbage collector. We use these +// funcs with clunky names as our poor approximation to methods. +// +// An alternative would be +// type structPointer struct { p unsafe.Pointer } +// but that does not registerize as well. + +// A structPointer is a pointer to a struct. +type structPointer unsafe.Pointer + +// toStructPointer returns a structPointer equivalent to the given reflect value. +func toStructPointer(v reflect.Value) structPointer { + return structPointer(unsafe.Pointer(v.Pointer())) +} + +// IsNil reports whether p is nil. +func structPointer_IsNil(p structPointer) bool { + return p == nil +} + +// Interface returns the struct pointer, assumed to have element type t, +// as an interface value. +func structPointer_Interface(p structPointer, t reflect.Type) interface{} { + return reflect.NewAt(t, unsafe.Pointer(p)).Interface() +} + +// A field identifies a field in a struct, accessible from a structPointer. +// In this implementation, a field is identified by its byte offset from the start of the struct. +type field uintptr + +// toField returns a field equivalent to the given reflect field. +func toField(f *reflect.StructField) field { + return field(f.Offset) +} + +// invalidField is an invalid field identifier. +const invalidField = ^field(0) + +// IsValid reports whether the field identifier is valid. +func (f field) IsValid() bool { + return f != ^field(0) +} + +// Bytes returns the address of a []byte field in the struct. 
+func structPointer_Bytes(p structPointer, f field) *[]byte { + return (*[]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// BytesSlice returns the address of a [][]byte field in the struct. +func structPointer_BytesSlice(p structPointer, f field) *[][]byte { + return (*[][]byte)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// Bool returns the address of a *bool field in the struct. +func structPointer_Bool(p structPointer, f field) **bool { + return (**bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// BoolVal returns the address of a bool field in the struct. +func structPointer_BoolVal(p structPointer, f field) *bool { + return (*bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// BoolSlice returns the address of a []bool field in the struct. +func structPointer_BoolSlice(p structPointer, f field) *[]bool { + return (*[]bool)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// String returns the address of a *string field in the struct. +func structPointer_String(p structPointer, f field) **string { + return (**string)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// StringVal returns the address of a string field in the struct. +func structPointer_StringVal(p structPointer, f field) *string { + return (*string)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// StringSlice returns the address of a []string field in the struct. +func structPointer_StringSlice(p structPointer, f field) *[]string { + return (*[]string)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// ExtMap returns the address of an extension map field in the struct. +func structPointer_Extensions(p structPointer, f field) *XXX_InternalExtensions { + return (*XXX_InternalExtensions)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension { + return (*map[int32]Extension)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// NewAt returns the reflect.Value for a pointer to a field in the struct. +func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value { + return reflect.NewAt(typ, unsafe.Pointer(uintptr(p)+uintptr(f))) +} + +// SetStructPointer writes a *struct field in the struct. +func structPointer_SetStructPointer(p structPointer, f field, q structPointer) { + *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) = q +} + +// GetStructPointer reads a *struct field in the struct. +func structPointer_GetStructPointer(p structPointer, f field) structPointer { + return *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// StructPointerSlice the address of a []*struct field in the struct. +func structPointer_StructPointerSlice(p structPointer, f field) *structPointerSlice { + return (*structPointerSlice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// A structPointerSlice represents a slice of pointers to structs (themselves submessages or groups). +type structPointerSlice []structPointer + +func (v *structPointerSlice) Len() int { return len(*v) } +func (v *structPointerSlice) Index(i int) structPointer { return (*v)[i] } +func (v *structPointerSlice) Append(p structPointer) { *v = append(*v, p) } + +// A word32 is the address of a "pointer to 32-bit value" field. +type word32 **uint32 + +// IsNil reports whether *v is nil. +func word32_IsNil(p word32) bool { + return *p == nil +} + +// Set sets *v to point at a newly allocated word set to x. 
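In the unsafe implementation a field is just its byte offset, so every accessor above is the same two steps: add the offset to the struct's base address, then cast. A small runnable sketch of that pattern (the msg type is made up; the uintptr arithmetic is written in the same style the vendored file uses):

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

type msg struct {
	Name  string
	Count int32
}

func main() {
	// toField above records a field's byte offset; each accessor adds that
	// offset to the struct's address and casts the result to a typed pointer.
	f, _ := reflect.TypeOf(msg{}).FieldByName("Count")
	m := &msg{Name: "x"}
	cnt := (*int32)(unsafe.Pointer(uintptr(unsafe.Pointer(m)) + f.Offset))
	*cnt = 42
	fmt.Println(m.Count) // 42
}

Newer Go code would usually reach for unsafe.Add here; the vendored file predates that helper.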
+func word32_Set(p word32, o *Buffer, x uint32) { + if len(o.uint32s) == 0 { + o.uint32s = make([]uint32, uint32PoolSize) + } + o.uint32s[0] = x + *p = &o.uint32s[0] + o.uint32s = o.uint32s[1:] +} + +// Get gets the value pointed at by *v. +func word32_Get(p word32) uint32 { + return **p +} + +// Word32 returns the address of a *int32, *uint32, *float32, or *enum field in the struct. +func structPointer_Word32(p structPointer, f field) word32 { + return word32((**uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +} + +// A word32Val is the address of a 32-bit value field. +type word32Val *uint32 + +// Set sets *p to x. +func word32Val_Set(p word32Val, x uint32) { + *p = x +} + +// Get gets the value pointed at by p. +func word32Val_Get(p word32Val) uint32 { + return *p +} + +// Word32Val returns the address of a *int32, *uint32, *float32, or *enum field in the struct. +func structPointer_Word32Val(p structPointer, f field) word32Val { + return word32Val((*uint32)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +} + +// A word32Slice is a slice of 32-bit values. +type word32Slice []uint32 + +func (v *word32Slice) Append(x uint32) { *v = append(*v, x) } +func (v *word32Slice) Len() int { return len(*v) } +func (v *word32Slice) Index(i int) uint32 { return (*v)[i] } + +// Word32Slice returns the address of a []int32, []uint32, []float32, or []enum field in the struct. +func structPointer_Word32Slice(p structPointer, f field) *word32Slice { + return (*word32Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} + +// word64 is like word32 but for 64-bit values. +type word64 **uint64 + +func word64_Set(p word64, o *Buffer, x uint64) { + if len(o.uint64s) == 0 { + o.uint64s = make([]uint64, uint64PoolSize) + } + o.uint64s[0] = x + *p = &o.uint64s[0] + o.uint64s = o.uint64s[1:] +} + +func word64_IsNil(p word64) bool { + return *p == nil +} + +func word64_Get(p word64) uint64 { + return **p +} + +func structPointer_Word64(p structPointer, f field) word64 { + return word64((**uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +} + +// word64Val is like word32Val but for 64-bit values. +type word64Val *uint64 + +func word64Val_Set(p word64Val, o *Buffer, x uint64) { + *p = x +} + +func word64Val_Get(p word64Val) uint64 { + return *p +} + +func structPointer_Word64Val(p structPointer, f field) word64Val { + return word64Val((*uint64)(unsafe.Pointer(uintptr(p) + uintptr(f)))) +} + +// word64Slice is like word32Slice but for 64-bit values. +type word64Slice []uint64 + +func (v *word64Slice) Append(x uint64) { *v = append(*v, x) } +func (v *word64Slice) Len() int { return len(*v) } +func (v *word64Slice) Index(i int) uint64 { return (*v)[i] } + +func structPointer_Word64Slice(p structPointer, f field) *word64Slice { + return (*word64Slice)(unsafe.Pointer(uintptr(p) + uintptr(f))) +} diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go new file mode 100644 index 000000000..ec2289c00 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/properties.go @@ -0,0 +1,872 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +/* + * Routines for encoding data into the wire format for protocol buffers. + */ + +import ( + "fmt" + "log" + "os" + "reflect" + "sort" + "strconv" + "strings" + "sync" +) + +const debug bool = false + +// Constants that identify the encoding of a value on the wire. +const ( + WireVarint = 0 + WireFixed64 = 1 + WireBytes = 2 + WireStartGroup = 3 + WireEndGroup = 4 + WireFixed32 = 5 +) + +const startSize = 10 // initial slice/string sizes + +// Encoders are defined in encode.go +// An encoder outputs the full representation of a field, including its +// tag and encoder type. +type encoder func(p *Buffer, prop *Properties, base structPointer) error + +// A valueEncoder encodes a single integer in a particular encoding. +type valueEncoder func(o *Buffer, x uint64) error + +// Sizers are defined in encode.go +// A sizer returns the encoded size of a field, including its tag and encoder +// type. +type sizer func(prop *Properties, base structPointer) int + +// A valueSizer returns the encoded size of a single integer in a particular +// encoding. +type valueSizer func(x uint64) int + +// Decoders are defined in decode.go +// A decoder creates a value from its wire representation. +// Unrecognized subelements are saved in unrec. +type decoder func(p *Buffer, prop *Properties, base structPointer) error + +// A valueDecoder decodes a single integer in a particular encoding. +type valueDecoder func(o *Buffer) (x uint64, err error) + +// A oneofMarshaler does the marshaling for all oneof fields in a message. +type oneofMarshaler func(Message, *Buffer) error + +// A oneofUnmarshaler does the unmarshaling for a oneof field in a message. +type oneofUnmarshaler func(Message, int, int, *Buffer) (bool, error) + +// A oneofSizer does the sizing for all oneof fields in a message. +type oneofSizer func(Message) int + +// tagMap is an optimization over map[int]int for typical protocol buffer +// use-cases. Encoded protocol buffers are often in tag order with small tag +// numbers. +type tagMap struct { + fastTags []int + slowTags map[int]int +} + +// tagMapFastLimit is the upper bound on the tag number that will be stored in +// the tagMap slice rather than its map. 
+const tagMapFastLimit = 1024 + +func (p *tagMap) get(t int) (int, bool) { + if t > 0 && t < tagMapFastLimit { + if t >= len(p.fastTags) { + return 0, false + } + fi := p.fastTags[t] + return fi, fi >= 0 + } + fi, ok := p.slowTags[t] + return fi, ok +} + +func (p *tagMap) put(t int, fi int) { + if t > 0 && t < tagMapFastLimit { + for len(p.fastTags) < t+1 { + p.fastTags = append(p.fastTags, -1) + } + p.fastTags[t] = fi + return + } + if p.slowTags == nil { + p.slowTags = make(map[int]int) + } + p.slowTags[t] = fi +} + +// StructProperties represents properties for all the fields of a struct. +// decoderTags and decoderOrigNames should only be used by the decoder. +type StructProperties struct { + Prop []*Properties // properties for each field + reqCount int // required count + decoderTags tagMap // map from proto tag to struct field number + decoderOrigNames map[string]int // map from original name to struct field number + order []int // list of struct field numbers in tag order + unrecField field // field id of the XXX_unrecognized []byte field + extendable bool // is this an extendable proto + + oneofMarshaler oneofMarshaler + oneofUnmarshaler oneofUnmarshaler + oneofSizer oneofSizer + stype reflect.Type + + // OneofTypes contains information about the oneof fields in this message. + // It is keyed by the original name of a field. + OneofTypes map[string]*OneofProperties +} + +// OneofProperties represents information about a specific field in a oneof. +type OneofProperties struct { + Type reflect.Type // pointer to generated struct type for this oneof field + Field int // struct field number of the containing oneof in the message + Prop *Properties +} + +// Implement the sorting interface so we can sort the fields in tag order, as recommended by the spec. +// See encode.go, (*Buffer).enc_struct. + +func (sp *StructProperties) Len() int { return len(sp.order) } +func (sp *StructProperties) Less(i, j int) bool { + return sp.Prop[sp.order[i]].Tag < sp.Prop[sp.order[j]].Tag +} +func (sp *StructProperties) Swap(i, j int) { sp.order[i], sp.order[j] = sp.order[j], sp.order[i] } + +// Properties represents the protocol-specific behavior of a single struct field. 
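The Wire* constants above form the low three bits of the key that precedes every encoded field; the rest of the key is the field's tag number shifted left by three (the same computation setEncAndDec performs further down when it precalculates tagcode). A tiny illustration, assuming field number 1 encoded with WireBytes:

package main

import "fmt"

func main() {
	// key = (tag << 3) | wireType; field 1 with wire type 2 (bytes).
	const tag, wire = 1, 2
	fmt.Printf("key byte: 0x%02x\n", tag<<3|wire) // 0x0a
}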
+type Properties struct { + Name string // name of the field, for error messages + OrigName string // original name before protocol compiler (always set) + JSONName string // name to use for JSON; determined by protoc + Wire string + WireType int + Tag int + Required bool + Optional bool + Repeated bool + Packed bool // relevant for repeated primitives only + Enum string // set for enum types only + proto3 bool // whether this is known to be a proto3 field; set for []byte only + oneof bool // whether this is a oneof field + + Default string // default value + HasDefault bool // whether an explicit default was provided + def_uint64 uint64 + + enc encoder + valEnc valueEncoder // set for bool and numeric types only + field field + tagcode []byte // encoding of EncodeVarint((Tag<<3)|WireType) + tagbuf [8]byte + stype reflect.Type // set for struct types only + sprop *StructProperties // set for struct types only + isMarshaler bool + isUnmarshaler bool + + mtype reflect.Type // set for map types only + mkeyprop *Properties // set for map types only + mvalprop *Properties // set for map types only + + size sizer + valSize valueSizer // set for bool and numeric types only + + dec decoder + valDec valueDecoder // set for bool and numeric types only + + // If this is a packable field, this will be the decoder for the packed version of the field. + packedDec decoder +} + +// String formats the properties in the protobuf struct field tag style. +func (p *Properties) String() string { + s := p.Wire + s = "," + s += strconv.Itoa(p.Tag) + if p.Required { + s += ",req" + } + if p.Optional { + s += ",opt" + } + if p.Repeated { + s += ",rep" + } + if p.Packed { + s += ",packed" + } + s += ",name=" + p.OrigName + if p.JSONName != p.OrigName { + s += ",json=" + p.JSONName + } + if p.proto3 { + s += ",proto3" + } + if p.oneof { + s += ",oneof" + } + if len(p.Enum) > 0 { + s += ",enum=" + p.Enum + } + if p.HasDefault { + s += ",def=" + p.Default + } + return s +} + +// Parse populates p by parsing a string in the protobuf struct field tag style. +func (p *Properties) Parse(s string) { + // "bytes,49,opt,name=foo,def=hello!" + fields := strings.Split(s, ",") // breaks def=, but handled below. 
+ if len(fields) < 2 { + fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s) + return + } + + p.Wire = fields[0] + switch p.Wire { + case "varint": + p.WireType = WireVarint + p.valEnc = (*Buffer).EncodeVarint + p.valDec = (*Buffer).DecodeVarint + p.valSize = sizeVarint + case "fixed32": + p.WireType = WireFixed32 + p.valEnc = (*Buffer).EncodeFixed32 + p.valDec = (*Buffer).DecodeFixed32 + p.valSize = sizeFixed32 + case "fixed64": + p.WireType = WireFixed64 + p.valEnc = (*Buffer).EncodeFixed64 + p.valDec = (*Buffer).DecodeFixed64 + p.valSize = sizeFixed64 + case "zigzag32": + p.WireType = WireVarint + p.valEnc = (*Buffer).EncodeZigzag32 + p.valDec = (*Buffer).DecodeZigzag32 + p.valSize = sizeZigzag32 + case "zigzag64": + p.WireType = WireVarint + p.valEnc = (*Buffer).EncodeZigzag64 + p.valDec = (*Buffer).DecodeZigzag64 + p.valSize = sizeZigzag64 + case "bytes", "group": + p.WireType = WireBytes + // no numeric converter for non-numeric types + default: + fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s) + return + } + + var err error + p.Tag, err = strconv.Atoi(fields[1]) + if err != nil { + return + } + + for i := 2; i < len(fields); i++ { + f := fields[i] + switch { + case f == "req": + p.Required = true + case f == "opt": + p.Optional = true + case f == "rep": + p.Repeated = true + case f == "packed": + p.Packed = true + case strings.HasPrefix(f, "name="): + p.OrigName = f[5:] + case strings.HasPrefix(f, "json="): + p.JSONName = f[5:] + case strings.HasPrefix(f, "enum="): + p.Enum = f[5:] + case f == "proto3": + p.proto3 = true + case f == "oneof": + p.oneof = true + case strings.HasPrefix(f, "def="): + p.HasDefault = true + p.Default = f[4:] // rest of string + if i+1 < len(fields) { + // Commas aren't escaped, and def is always last. + p.Default += "," + strings.Join(fields[i+1:], ",") + break + } + } + } +} + +func logNoSliceEnc(t1, t2 reflect.Type) { + fmt.Fprintf(os.Stderr, "proto: no slice oenc for %T = []%T\n", t1, t2) +} + +var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem() + +// Initialize the fields for encoding and decoding. 
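Parse above consumes the comma-separated tag that protoc-gen-go attaches to every generated field. A short sketch of what such a tag looks like and how it splits; the example struct is illustrative, not generated code:

package main

import (
	"fmt"
	"reflect"
	"strings"
)

// A field shaped like protoc-gen-go output.
type example struct {
	Name string `protobuf:"bytes,1,opt,name=name,json=name"`
}

func main() {
	f, _ := reflect.TypeOf(example{}).FieldByName("Name")
	parts := strings.Split(f.Tag.Get("protobuf"), ",")
	// Parse reads the same layout: wire type, tag number, then flags and
	// key=value options (with def= always last).
	fmt.Println(parts[0])  // "bytes" -> p.Wire
	fmt.Println(parts[1])  // "1"     -> p.Tag
	fmt.Println(parts[2:]) // [opt name=name json=name]
}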
+func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { + p.enc = nil + p.dec = nil + p.size = nil + + switch t1 := typ; t1.Kind() { + default: + fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1) + + // proto3 scalar types + + case reflect.Bool: + p.enc = (*Buffer).enc_proto3_bool + p.dec = (*Buffer).dec_proto3_bool + p.size = size_proto3_bool + case reflect.Int32: + p.enc = (*Buffer).enc_proto3_int32 + p.dec = (*Buffer).dec_proto3_int32 + p.size = size_proto3_int32 + case reflect.Uint32: + p.enc = (*Buffer).enc_proto3_uint32 + p.dec = (*Buffer).dec_proto3_int32 // can reuse + p.size = size_proto3_uint32 + case reflect.Int64, reflect.Uint64: + p.enc = (*Buffer).enc_proto3_int64 + p.dec = (*Buffer).dec_proto3_int64 + p.size = size_proto3_int64 + case reflect.Float32: + p.enc = (*Buffer).enc_proto3_uint32 // can just treat them as bits + p.dec = (*Buffer).dec_proto3_int32 + p.size = size_proto3_uint32 + case reflect.Float64: + p.enc = (*Buffer).enc_proto3_int64 // can just treat them as bits + p.dec = (*Buffer).dec_proto3_int64 + p.size = size_proto3_int64 + case reflect.String: + p.enc = (*Buffer).enc_proto3_string + p.dec = (*Buffer).dec_proto3_string + p.size = size_proto3_string + + case reflect.Ptr: + switch t2 := t1.Elem(); t2.Kind() { + default: + fmt.Fprintf(os.Stderr, "proto: no encoder function for %v -> %v\n", t1, t2) + break + case reflect.Bool: + p.enc = (*Buffer).enc_bool + p.dec = (*Buffer).dec_bool + p.size = size_bool + case reflect.Int32: + p.enc = (*Buffer).enc_int32 + p.dec = (*Buffer).dec_int32 + p.size = size_int32 + case reflect.Uint32: + p.enc = (*Buffer).enc_uint32 + p.dec = (*Buffer).dec_int32 // can reuse + p.size = size_uint32 + case reflect.Int64, reflect.Uint64: + p.enc = (*Buffer).enc_int64 + p.dec = (*Buffer).dec_int64 + p.size = size_int64 + case reflect.Float32: + p.enc = (*Buffer).enc_uint32 // can just treat them as bits + p.dec = (*Buffer).dec_int32 + p.size = size_uint32 + case reflect.Float64: + p.enc = (*Buffer).enc_int64 // can just treat them as bits + p.dec = (*Buffer).dec_int64 + p.size = size_int64 + case reflect.String: + p.enc = (*Buffer).enc_string + p.dec = (*Buffer).dec_string + p.size = size_string + case reflect.Struct: + p.stype = t1.Elem() + p.isMarshaler = isMarshaler(t1) + p.isUnmarshaler = isUnmarshaler(t1) + if p.Wire == "bytes" { + p.enc = (*Buffer).enc_struct_message + p.dec = (*Buffer).dec_struct_message + p.size = size_struct_message + } else { + p.enc = (*Buffer).enc_struct_group + p.dec = (*Buffer).dec_struct_group + p.size = size_struct_group + } + } + + case reflect.Slice: + switch t2 := t1.Elem(); t2.Kind() { + default: + logNoSliceEnc(t1, t2) + break + case reflect.Bool: + if p.Packed { + p.enc = (*Buffer).enc_slice_packed_bool + p.size = size_slice_packed_bool + } else { + p.enc = (*Buffer).enc_slice_bool + p.size = size_slice_bool + } + p.dec = (*Buffer).dec_slice_bool + p.packedDec = (*Buffer).dec_slice_packed_bool + case reflect.Int32: + if p.Packed { + p.enc = (*Buffer).enc_slice_packed_int32 + p.size = size_slice_packed_int32 + } else { + p.enc = (*Buffer).enc_slice_int32 + p.size = size_slice_int32 + } + p.dec = (*Buffer).dec_slice_int32 + p.packedDec = (*Buffer).dec_slice_packed_int32 + case reflect.Uint32: + if p.Packed { + p.enc = (*Buffer).enc_slice_packed_uint32 + p.size = size_slice_packed_uint32 + } else { + p.enc = (*Buffer).enc_slice_uint32 + p.size = size_slice_uint32 + } + p.dec = (*Buffer).dec_slice_int32 + p.packedDec = (*Buffer).dec_slice_packed_int32 + case 
reflect.Int64, reflect.Uint64: + if p.Packed { + p.enc = (*Buffer).enc_slice_packed_int64 + p.size = size_slice_packed_int64 + } else { + p.enc = (*Buffer).enc_slice_int64 + p.size = size_slice_int64 + } + p.dec = (*Buffer).dec_slice_int64 + p.packedDec = (*Buffer).dec_slice_packed_int64 + case reflect.Uint8: + p.dec = (*Buffer).dec_slice_byte + if p.proto3 { + p.enc = (*Buffer).enc_proto3_slice_byte + p.size = size_proto3_slice_byte + } else { + p.enc = (*Buffer).enc_slice_byte + p.size = size_slice_byte + } + case reflect.Float32, reflect.Float64: + switch t2.Bits() { + case 32: + // can just treat them as bits + if p.Packed { + p.enc = (*Buffer).enc_slice_packed_uint32 + p.size = size_slice_packed_uint32 + } else { + p.enc = (*Buffer).enc_slice_uint32 + p.size = size_slice_uint32 + } + p.dec = (*Buffer).dec_slice_int32 + p.packedDec = (*Buffer).dec_slice_packed_int32 + case 64: + // can just treat them as bits + if p.Packed { + p.enc = (*Buffer).enc_slice_packed_int64 + p.size = size_slice_packed_int64 + } else { + p.enc = (*Buffer).enc_slice_int64 + p.size = size_slice_int64 + } + p.dec = (*Buffer).dec_slice_int64 + p.packedDec = (*Buffer).dec_slice_packed_int64 + default: + logNoSliceEnc(t1, t2) + break + } + case reflect.String: + p.enc = (*Buffer).enc_slice_string + p.dec = (*Buffer).dec_slice_string + p.size = size_slice_string + case reflect.Ptr: + switch t3 := t2.Elem(); t3.Kind() { + default: + fmt.Fprintf(os.Stderr, "proto: no ptr oenc for %T -> %T -> %T\n", t1, t2, t3) + break + case reflect.Struct: + p.stype = t2.Elem() + p.isMarshaler = isMarshaler(t2) + p.isUnmarshaler = isUnmarshaler(t2) + if p.Wire == "bytes" { + p.enc = (*Buffer).enc_slice_struct_message + p.dec = (*Buffer).dec_slice_struct_message + p.size = size_slice_struct_message + } else { + p.enc = (*Buffer).enc_slice_struct_group + p.dec = (*Buffer).dec_slice_struct_group + p.size = size_slice_struct_group + } + } + case reflect.Slice: + switch t2.Elem().Kind() { + default: + fmt.Fprintf(os.Stderr, "proto: no slice elem oenc for %T -> %T -> %T\n", t1, t2, t2.Elem()) + break + case reflect.Uint8: + p.enc = (*Buffer).enc_slice_slice_byte + p.dec = (*Buffer).dec_slice_slice_byte + p.size = size_slice_slice_byte + } + } + + case reflect.Map: + p.enc = (*Buffer).enc_new_map + p.dec = (*Buffer).dec_new_map + p.size = size_new_map + + p.mtype = t1 + p.mkeyprop = &Properties{} + p.mkeyprop.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) + p.mvalprop = &Properties{} + vtype := p.mtype.Elem() + if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice { + // The value type is not a message (*T) or bytes ([]byte), + // so we need encoders for the pointer to this type. + vtype = reflect.PtrTo(vtype) + } + p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) + } + + // precalculate tag code + wire := p.WireType + if p.Packed { + wire = WireBytes + } + x := uint32(p.Tag)<<3 | uint32(wire) + i := 0 + for i = 0; x > 127; i++ { + p.tagbuf[i] = 0x80 | uint8(x&0x7F) + x >>= 7 + } + p.tagbuf[i] = uint8(x) + p.tagcode = p.tagbuf[0 : i+1] + + if p.stype != nil { + if lockGetProp { + p.sprop = GetProperties(p.stype) + } else { + p.sprop = getPropertiesLocked(p.stype) + } + } +} + +var ( + marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() + unmarshalerType = reflect.TypeOf((*Unmarshaler)(nil)).Elem() +) + +// isMarshaler reports whether type t implements Marshaler. 
+func isMarshaler(t reflect.Type) bool { + // We're checking for (likely) pointer-receiver methods + // so if t is not a pointer, something is very wrong. + // The calls above only invoke isMarshaler on pointer types. + if t.Kind() != reflect.Ptr { + panic("proto: misuse of isMarshaler") + } + return t.Implements(marshalerType) +} + +// isUnmarshaler reports whether type t implements Unmarshaler. +func isUnmarshaler(t reflect.Type) bool { + // We're checking for (likely) pointer-receiver methods + // so if t is not a pointer, something is very wrong. + // The calls above only invoke isUnmarshaler on pointer types. + if t.Kind() != reflect.Ptr { + panic("proto: misuse of isUnmarshaler") + } + return t.Implements(unmarshalerType) +} + +// Init populates the properties from a protocol buffer struct tag. +func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { + p.init(typ, name, tag, f, true) +} + +func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructField, lockGetProp bool) { + // "bytes,49,opt,def=hello!" + p.Name = name + p.OrigName = name + if f != nil { + p.field = toField(f) + } + if tag == "" { + return + } + p.Parse(tag) + p.setEncAndDec(typ, f, lockGetProp) +} + +var ( + propertiesMu sync.RWMutex + propertiesMap = make(map[reflect.Type]*StructProperties) +) + +// GetProperties returns the list of properties for the type represented by t. +// t must represent a generated struct type of a protocol message. +func GetProperties(t reflect.Type) *StructProperties { + if t.Kind() != reflect.Struct { + panic("proto: type must have kind struct") + } + + // Most calls to GetProperties in a long-running program will be + // retrieving details for types we have seen before. + propertiesMu.RLock() + sprop, ok := propertiesMap[t] + propertiesMu.RUnlock() + if ok { + if collectStats { + stats.Chit++ + } + return sprop + } + + propertiesMu.Lock() + sprop = getPropertiesLocked(t) + propertiesMu.Unlock() + return sprop +} + +// getPropertiesLocked requires that propertiesMu is held. +func getPropertiesLocked(t reflect.Type) *StructProperties { + if prop, ok := propertiesMap[t]; ok { + if collectStats { + stats.Chit++ + } + return prop + } + if collectStats { + stats.Cmiss++ + } + + prop := new(StructProperties) + // in case of recursive protos, fill this in now. + propertiesMap[t] = prop + + // build properties + prop.extendable = reflect.PtrTo(t).Implements(extendableProtoType) || + reflect.PtrTo(t).Implements(extendableProtoV1Type) + prop.unrecField = invalidField + prop.Prop = make([]*Properties, t.NumField()) + prop.order = make([]int, t.NumField()) + + for i := 0; i < t.NumField(); i++ { + f := t.Field(i) + p := new(Properties) + name := f.Name + p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false) + + if f.Name == "XXX_InternalExtensions" { // special case + p.enc = (*Buffer).enc_exts + p.dec = nil // not needed + p.size = size_exts + } else if f.Name == "XXX_extensions" { // special case + p.enc = (*Buffer).enc_map + p.dec = nil // not needed + p.size = size_map + } else if f.Name == "XXX_unrecognized" { // special case + prop.unrecField = toField(&f) + } + oneof := f.Tag.Get("protobuf_oneof") // special case + if oneof != "" { + // Oneof fields don't use the traditional protobuf tag. 
+ p.OrigName = oneof + } + prop.Prop[i] = p + prop.order[i] = i + if debug { + print(i, " ", f.Name, " ", t.String(), " ") + if p.Tag > 0 { + print(p.String()) + } + print("\n") + } + if p.enc == nil && !strings.HasPrefix(f.Name, "XXX_") && oneof == "" { + fmt.Fprintln(os.Stderr, "proto: no encoder for", f.Name, f.Type.String(), "[GetProperties]") + } + } + + // Re-order prop.order. + sort.Sort(prop) + + type oneofMessage interface { + XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) + } + if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { + var oots []interface{} + prop.oneofMarshaler, prop.oneofUnmarshaler, prop.oneofSizer, oots = om.XXX_OneofFuncs() + prop.stype = t + + // Interpret oneof metadata. + prop.OneofTypes = make(map[string]*OneofProperties) + for _, oot := range oots { + oop := &OneofProperties{ + Type: reflect.ValueOf(oot).Type(), // *T + Prop: new(Properties), + } + sft := oop.Type.Elem().Field(0) + oop.Prop.Name = sft.Name + oop.Prop.Parse(sft.Tag.Get("protobuf")) + // There will be exactly one interface field that + // this new value is assignable to. + for i := 0; i < t.NumField(); i++ { + f := t.Field(i) + if f.Type.Kind() != reflect.Interface { + continue + } + if !oop.Type.AssignableTo(f.Type) { + continue + } + oop.Field = i + break + } + prop.OneofTypes[oop.Prop.OrigName] = oop + } + } + + // build required counts + // build tags + reqCount := 0 + prop.decoderOrigNames = make(map[string]int) + for i, p := range prop.Prop { + if strings.HasPrefix(p.Name, "XXX_") { + // Internal fields should not appear in tags/origNames maps. + // They are handled specially when encoding and decoding. + continue + } + if p.Required { + reqCount++ + } + prop.decoderTags.put(p.Tag, i) + prop.decoderOrigNames[p.OrigName] = i + } + prop.reqCount = reqCount + + return prop +} + +// Return the Properties object for the x[0]'th field of the structure. +func propByIndex(t reflect.Type, x []int) *Properties { + if len(x) != 1 { + fmt.Fprintf(os.Stderr, "proto: field index dimension %d (not 1) for type %s\n", len(x), t) + return nil + } + prop := GetProperties(t) + return prop.Prop[x[0]] +} + +// Get the address and type of a pointer to a struct from an interface. +func getbase(pb Message) (t reflect.Type, b structPointer, err error) { + if pb == nil { + err = ErrNil + return + } + // get the reflect type of the pointer to the struct. + t = reflect.TypeOf(pb) + // get the address of the struct. + value := reflect.ValueOf(pb) + b = toStructPointer(value) + return +} + +// A global registry of enum types. +// The generated code will register the generated maps by calling RegisterEnum. + +var enumValueMaps = make(map[string]map[string]int32) + +// RegisterEnum is called from the generated code to install the enum descriptor +// maps into the global table to aid parsing text format protocol buffers. +func RegisterEnum(typeName string, unusedNameMap map[int32]string, valueMap map[string]int32) { + if _, ok := enumValueMaps[typeName]; ok { + panic("proto: duplicate enum registered: " + typeName) + } + enumValueMaps[typeName] = valueMap +} + +// EnumValueMap returns the mapping from names to integers of the +// enum type enumType, or a nil if not found. +func EnumValueMap(enumType string) map[string]int32 { + return enumValueMaps[enumType] +} + +// A registry of all linked message types. +// The string is a fully-qualified proto name ("pkg.Message"). 
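GetProperties/getPropertiesLocked above take the usual read-lock fast path with a write-locked re-check on first use of a type. The same locking shape in isolation, reduced to hypothetical string keys and int values:

package main

import (
	"fmt"
	"sync"
)

// propCache mirrors the locking in GetProperties: a read lock for the common
// hit, a write lock plus a re-check for the first computation of each key.
type propCache struct {
	mu sync.RWMutex
	m  map[string]int
}

func (c *propCache) get(key string, build func() int) int {
	c.mu.RLock()
	v, ok := c.m[key]
	c.mu.RUnlock()
	if ok {
		return v
	}
	c.mu.Lock()
	defer c.mu.Unlock()
	if v, ok := c.m[key]; ok { // another goroutine may have filled it
		return v
	}
	v = build()
	c.m[key] = v
	return v
}

func main() {
	c := &propCache{m: make(map[string]int)}
	fmt.Println(c.get("pkg.Message", func() int { return 7 }))  // builds: 7
	fmt.Println(c.get("pkg.Message", func() int { return -1 })) // cached: 7
}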
+var ( + protoTypes = make(map[string]reflect.Type) + revProtoTypes = make(map[reflect.Type]string) +) + +// RegisterType is called from generated code and maps from the fully qualified +// proto name to the type (pointer to struct) of the protocol buffer. +func RegisterType(x Message, name string) { + if _, ok := protoTypes[name]; ok { + // TODO: Some day, make this a panic. + log.Printf("proto: duplicate proto type registered: %s", name) + return + } + t := reflect.TypeOf(x) + protoTypes[name] = t + revProtoTypes[t] = name +} + +// MessageName returns the fully-qualified proto name for the given message type. +func MessageName(x Message) string { + type xname interface { + XXX_MessageName() string + } + if m, ok := x.(xname); ok { + return m.XXX_MessageName() + } + return revProtoTypes[reflect.TypeOf(x)] +} + +// MessageType returns the message type (pointer to struct) for a named message. +func MessageType(name string) reflect.Type { return protoTypes[name] } + +// A registry of all linked proto files. +var ( + protoFiles = make(map[string][]byte) // file name => fileDescriptor +) + +// RegisterFile is called from generated code and maps from the +// full file name of a .proto file to its compressed FileDescriptorProto. +func RegisterFile(filename string, fileDescriptor []byte) { + protoFiles[filename] = fileDescriptor +} + +// FileDescriptor returns the compressed FileDescriptorProto for a .proto file. +func FileDescriptor(filename string) []byte { return protoFiles[filename] } diff --git a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go new file mode 100644 index 000000000..cc4d0489f --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go @@ -0,0 +1,347 @@ +// Code generated by protoc-gen-go. +// source: proto3_proto/proto3.proto +// DO NOT EDIT! + +/* +Package proto3_proto is a generated protocol buffer package. + +It is generated from these files: + proto3_proto/proto3.proto + +It has these top-level messages: + Message + Nested + MessageWithMap + IntMap + IntMaps +*/ +package proto3_proto + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" +import testdata "github.com/golang/protobuf/proto/testdata" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
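These registries are filled by the init functions of generated packages (proto3.pb.go below calls RegisterType and RegisterEnum) and queried by fully-qualified name afterwards. A short usage sketch against the packages vendored in this diff, assuming they are imported from a test or example program:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "github.com/golang/protobuf/proto/proto3_proto"
)

func main() {
	// proto3.pb.go registers its message types and enums in init(), so the
	// registries defined above resolve names at runtime.
	fmt.Println(proto.MessageType("proto3_proto.Message"))                 // *proto3_proto.Message
	fmt.Println(proto.MessageName(&pb.Message{}))                          // proto3_proto.Message
	fmt.Println(proto.EnumValueMap("proto3_proto.Message_Humour")["PUNS"]) // 1
}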
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Message_Humour int32 + +const ( + Message_UNKNOWN Message_Humour = 0 + Message_PUNS Message_Humour = 1 + Message_SLAPSTICK Message_Humour = 2 + Message_BILL_BAILEY Message_Humour = 3 +) + +var Message_Humour_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PUNS", + 2: "SLAPSTICK", + 3: "BILL_BAILEY", +} +var Message_Humour_value = map[string]int32{ + "UNKNOWN": 0, + "PUNS": 1, + "SLAPSTICK": 2, + "BILL_BAILEY": 3, +} + +func (x Message_Humour) String() string { + return proto.EnumName(Message_Humour_name, int32(x)) +} +func (Message_Humour) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } + +type Message struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Hilarity Message_Humour `protobuf:"varint,2,opt,name=hilarity,enum=proto3_proto.Message_Humour" json:"hilarity,omitempty"` + HeightInCm uint32 `protobuf:"varint,3,opt,name=height_in_cm,json=heightInCm" json:"height_in_cm,omitempty"` + Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` + ResultCount int64 `protobuf:"varint,7,opt,name=result_count,json=resultCount" json:"result_count,omitempty"` + TrueScotsman bool `protobuf:"varint,8,opt,name=true_scotsman,json=trueScotsman" json:"true_scotsman,omitempty"` + Score float32 `protobuf:"fixed32,9,opt,name=score" json:"score,omitempty"` + Key []uint64 `protobuf:"varint,5,rep,packed,name=key" json:"key,omitempty"` + ShortKey []int32 `protobuf:"varint,19,rep,packed,name=short_key,json=shortKey" json:"short_key,omitempty"` + Nested *Nested `protobuf:"bytes,6,opt,name=nested" json:"nested,omitempty"` + RFunny []Message_Humour `protobuf:"varint,16,rep,packed,name=r_funny,json=rFunny,enum=proto3_proto.Message_Humour" json:"r_funny,omitempty"` + Terrain map[string]*Nested `protobuf:"bytes,10,rep,name=terrain" json:"terrain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Proto2Field *testdata.SubDefaults `protobuf:"bytes,11,opt,name=proto2_field,json=proto2Field" json:"proto2_field,omitempty"` + Proto2Value map[string]*testdata.SubDefaults `protobuf:"bytes,13,rep,name=proto2_value,json=proto2Value" json:"proto2_value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Anything *google_protobuf.Any `protobuf:"bytes,14,opt,name=anything" json:"anything,omitempty"` + ManyThings []*google_protobuf.Any `protobuf:"bytes,15,rep,name=many_things,json=manyThings" json:"many_things,omitempty"` + Submessage *Message `protobuf:"bytes,17,opt,name=submessage" json:"submessage,omitempty"` + Children []*Message `protobuf:"bytes,18,rep,name=children" json:"children,omitempty"` +} + +func (m *Message) Reset() { *m = Message{} } +func (m *Message) String() string { return proto.CompactTextString(m) } +func (*Message) ProtoMessage() {} +func (*Message) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Message) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Message) GetHilarity() Message_Humour { + if m != nil { + return m.Hilarity + } + return Message_UNKNOWN +} + +func (m *Message) GetHeightInCm() uint32 { + if m != nil { + return m.HeightInCm + } + return 0 +} + +func (m *Message) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *Message) GetResultCount() int64 { + if m != nil { + return m.ResultCount + } + return 0 +} + +func (m *Message) GetTrueScotsman() bool { + if m != nil { + return 
m.TrueScotsman + } + return false +} + +func (m *Message) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *Message) GetKey() []uint64 { + if m != nil { + return m.Key + } + return nil +} + +func (m *Message) GetShortKey() []int32 { + if m != nil { + return m.ShortKey + } + return nil +} + +func (m *Message) GetNested() *Nested { + if m != nil { + return m.Nested + } + return nil +} + +func (m *Message) GetRFunny() []Message_Humour { + if m != nil { + return m.RFunny + } + return nil +} + +func (m *Message) GetTerrain() map[string]*Nested { + if m != nil { + return m.Terrain + } + return nil +} + +func (m *Message) GetProto2Field() *testdata.SubDefaults { + if m != nil { + return m.Proto2Field + } + return nil +} + +func (m *Message) GetProto2Value() map[string]*testdata.SubDefaults { + if m != nil { + return m.Proto2Value + } + return nil +} + +func (m *Message) GetAnything() *google_protobuf.Any { + if m != nil { + return m.Anything + } + return nil +} + +func (m *Message) GetManyThings() []*google_protobuf.Any { + if m != nil { + return m.ManyThings + } + return nil +} + +func (m *Message) GetSubmessage() *Message { + if m != nil { + return m.Submessage + } + return nil +} + +func (m *Message) GetChildren() []*Message { + if m != nil { + return m.Children + } + return nil +} + +type Nested struct { + Bunny string `protobuf:"bytes,1,opt,name=bunny" json:"bunny,omitempty"` + Cute bool `protobuf:"varint,2,opt,name=cute" json:"cute,omitempty"` +} + +func (m *Nested) Reset() { *m = Nested{} } +func (m *Nested) String() string { return proto.CompactTextString(m) } +func (*Nested) ProtoMessage() {} +func (*Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Nested) GetBunny() string { + if m != nil { + return m.Bunny + } + return "" +} + +func (m *Nested) GetCute() bool { + if m != nil { + return m.Cute + } + return false +} + +type MessageWithMap struct { + ByteMapping map[bool][]byte `protobuf:"bytes,1,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } +func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } +func (*MessageWithMap) ProtoMessage() {} +func (*MessageWithMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *MessageWithMap) GetByteMapping() map[bool][]byte { + if m != nil { + return m.ByteMapping + } + return nil +} + +type IntMap struct { + Rtt map[int32]int32 `protobuf:"bytes,1,rep,name=rtt" json:"rtt,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` +} + +func (m *IntMap) Reset() { *m = IntMap{} } +func (m *IntMap) String() string { return proto.CompactTextString(m) } +func (*IntMap) ProtoMessage() {} +func (*IntMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *IntMap) GetRtt() map[int32]int32 { + if m != nil { + return m.Rtt + } + return nil +} + +type IntMaps struct { + Maps []*IntMap `protobuf:"bytes,1,rep,name=maps" json:"maps,omitempty"` +} + +func (m *IntMaps) Reset() { *m = IntMaps{} } +func (m *IntMaps) String() string { return proto.CompactTextString(m) } +func (*IntMaps) ProtoMessage() {} +func (*IntMaps) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *IntMaps) GetMaps() []*IntMap { + if m != nil { + return m.Maps + } + return nil +} + +func init() { + 
proto.RegisterType((*Message)(nil), "proto3_proto.Message") + proto.RegisterType((*Nested)(nil), "proto3_proto.Nested") + proto.RegisterType((*MessageWithMap)(nil), "proto3_proto.MessageWithMap") + proto.RegisterType((*IntMap)(nil), "proto3_proto.IntMap") + proto.RegisterType((*IntMaps)(nil), "proto3_proto.IntMaps") + proto.RegisterEnum("proto3_proto.Message_Humour", Message_Humour_name, Message_Humour_value) +} + +func init() { proto.RegisterFile("proto3_proto/proto3.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 733 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x84, 0x53, 0x6d, 0x6f, 0xf3, 0x34, + 0x14, 0x25, 0x4d, 0x5f, 0xd2, 0x9b, 0x74, 0x0b, 0x5e, 0x91, 0xbc, 0x02, 0x52, 0x28, 0x12, 0x8a, + 0x78, 0x49, 0xa1, 0xd3, 0xd0, 0x84, 0x10, 0x68, 0x1b, 0x9b, 0xa8, 0xd6, 0x95, 0xca, 0xdd, 0x98, + 0xf8, 0x14, 0xa5, 0xad, 0xdb, 0x46, 0x34, 0x4e, 0x49, 0x1c, 0xa4, 0xfc, 0x1d, 0xfe, 0x28, 0x8f, + 0x6c, 0xa7, 0x5d, 0x36, 0x65, 0xcf, 0xf3, 0x29, 0xf6, 0xf1, 0xb9, 0xf7, 0x9c, 0x1c, 0x5f, 0xc3, + 0xe9, 0x2e, 0x89, 0x79, 0x7c, 0xe6, 0xcb, 0xcf, 0x40, 0x6d, 0x3c, 0xf9, 0x41, 0x56, 0xf9, 0xa8, + 0x77, 0xba, 0x8e, 0xe3, 0xf5, 0x96, 0x2a, 0xca, 0x3c, 0x5b, 0x0d, 0x02, 0x96, 0x2b, 0x62, 0xef, + 0x84, 0xd3, 0x94, 0x2f, 0x03, 0x1e, 0x0c, 0xc4, 0x42, 0x81, 0xfd, 0xff, 0x5b, 0xd0, 0xba, 0xa7, + 0x69, 0x1a, 0xac, 0x29, 0x42, 0x50, 0x67, 0x41, 0x44, 0xb1, 0xe6, 0x68, 0x6e, 0x9b, 0xc8, 0x35, + 0xba, 0x00, 0x63, 0x13, 0x6e, 0x83, 0x24, 0xe4, 0x39, 0xae, 0x39, 0x9a, 0x7b, 0x34, 0xfc, 0xcc, + 0x2b, 0x0b, 0x7a, 0x45, 0xb1, 0xf7, 0x7b, 0x16, 0xc5, 0x59, 0x42, 0x0e, 0x6c, 0xe4, 0x80, 0xb5, + 0xa1, 0xe1, 0x7a, 0xc3, 0xfd, 0x90, 0xf9, 0x8b, 0x08, 0xeb, 0x8e, 0xe6, 0x76, 0x08, 0x28, 0x6c, + 0xc4, 0xae, 0x23, 0xa1, 0x27, 0xec, 0xe0, 0xba, 0xa3, 0xb9, 0x16, 0x91, 0x6b, 0xf4, 0x05, 0x58, + 0x09, 0x4d, 0xb3, 0x2d, 0xf7, 0x17, 0x71, 0xc6, 0x38, 0x6e, 0x39, 0x9a, 0xab, 0x13, 0x53, 0x61, + 0xd7, 0x02, 0x42, 0x5f, 0x42, 0x87, 0x27, 0x19, 0xf5, 0xd3, 0x45, 0xcc, 0xd3, 0x28, 0x60, 0xd8, + 0x70, 0x34, 0xd7, 0x20, 0x96, 0x00, 0x67, 0x05, 0x86, 0xba, 0xd0, 0x48, 0x17, 0x71, 0x42, 0x71, + 0xdb, 0xd1, 0xdc, 0x1a, 0x51, 0x1b, 0x64, 0x83, 0xfe, 0x37, 0xcd, 0x71, 0xc3, 0xd1, 0xdd, 0x3a, + 0x11, 0x4b, 0xf4, 0x29, 0xb4, 0xd3, 0x4d, 0x9c, 0x70, 0x5f, 0xe0, 0x27, 0x8e, 0xee, 0x36, 0x88, + 0x21, 0x81, 0x3b, 0x9a, 0xa3, 0x6f, 0xa1, 0xc9, 0x68, 0xca, 0xe9, 0x12, 0x37, 0x1d, 0xcd, 0x35, + 0x87, 0xdd, 0x97, 0xbf, 0x3e, 0x91, 0x67, 0xa4, 0xe0, 0xa0, 0x73, 0x68, 0x25, 0xfe, 0x2a, 0x63, + 0x2c, 0xc7, 0xb6, 0xa3, 0x7f, 0x30, 0xa9, 0x66, 0x72, 0x2b, 0xb8, 0xe8, 0x67, 0x68, 0x71, 0x9a, + 0x24, 0x41, 0xc8, 0x30, 0x38, 0xba, 0x6b, 0x0e, 0xfb, 0xd5, 0x65, 0x0f, 0x8a, 0x74, 0xc3, 0x78, + 0x92, 0x93, 0x7d, 0x09, 0xba, 0x00, 0x75, 0xff, 0x43, 0x7f, 0x15, 0xd2, 0xed, 0x12, 0x9b, 0xd2, + 0xe8, 0x27, 0xde, 0xfe, 0xae, 0xbd, 0x59, 0x36, 0xff, 0x8d, 0xae, 0x82, 0x6c, 0xcb, 0x53, 0x62, + 0x2a, 0xea, 0xad, 0x60, 0xa2, 0xd1, 0xa1, 0xf2, 0xdf, 0x60, 0x9b, 0x51, 0xdc, 0x91, 0xe2, 0x5f, + 0x55, 0x8b, 0x4f, 0x25, 0xf3, 0x4f, 0x41, 0x54, 0x06, 0x8a, 0x56, 0x12, 0x41, 0xdf, 0x83, 0x11, + 0xb0, 0x9c, 0x6f, 0x42, 0xb6, 0xc6, 0x47, 0x45, 0x52, 0x6a, 0x0e, 0xbd, 0xfd, 0x1c, 0x7a, 0x97, + 0x2c, 0x27, 0x07, 0x16, 0x3a, 0x07, 0x33, 0x0a, 0x58, 0xee, 0xcb, 0x5d, 0x8a, 0x8f, 0xa5, 0x76, + 0x75, 0x11, 0x08, 0xe2, 0x83, 0xe4, 0xa1, 0x73, 0x80, 0x34, 0x9b, 0x47, 0xca, 0x14, 0xfe, 0xb8, + 0xf8, 0xd7, 0x2a, 0xc7, 0xa4, 0x44, 0x44, 0x3f, 0x80, 0xb1, 0xd8, 0x84, 0xdb, 0x65, 0x42, 0x19, + 0x46, 0x52, 0xea, 0x8d, 0xa2, 
0x03, 0xad, 0x37, 0x05, 0xab, 0x1c, 0xf8, 0x7e, 0x72, 0xd4, 0xd3, + 0x90, 0x93, 0xf3, 0x35, 0x34, 0x54, 0x70, 0xb5, 0xf7, 0xcc, 0x86, 0xa2, 0xfc, 0x54, 0xbb, 0xd0, + 0x7a, 0x8f, 0x60, 0xbf, 0x4e, 0xb1, 0xa2, 0xeb, 0x37, 0x2f, 0xbb, 0xbe, 0x71, 0x91, 0xcf, 0x6d, + 0xfb, 0xbf, 0x42, 0x53, 0x0d, 0x14, 0x32, 0xa1, 0xf5, 0x38, 0xb9, 0x9b, 0xfc, 0xf1, 0x34, 0xb1, + 0x3f, 0x42, 0x06, 0xd4, 0xa7, 0x8f, 0x93, 0x99, 0xad, 0xa1, 0x0e, 0xb4, 0x67, 0xe3, 0xcb, 0xe9, + 0xec, 0x61, 0x74, 0x7d, 0x67, 0xd7, 0xd0, 0x31, 0x98, 0x57, 0xa3, 0xf1, 0xd8, 0xbf, 0xba, 0x1c, + 0x8d, 0x6f, 0xfe, 0xb2, 0xf5, 0xfe, 0x10, 0x9a, 0xca, 0xac, 0x78, 0x33, 0x73, 0x39, 0xbe, 0xca, + 0x8f, 0xda, 0x88, 0x57, 0xba, 0xc8, 0xb8, 0x32, 0x64, 0x10, 0xb9, 0xee, 0xff, 0xa7, 0xc1, 0x51, + 0x91, 0xd9, 0x53, 0xc8, 0x37, 0xf7, 0xc1, 0x0e, 0x4d, 0xc1, 0x9a, 0xe7, 0x9c, 0xfa, 0x51, 0xb0, + 0xdb, 0x89, 0x39, 0xd0, 0x64, 0xce, 0xdf, 0x55, 0xe6, 0x5c, 0xd4, 0x78, 0x57, 0x39, 0xa7, 0xf7, + 0x8a, 0x5f, 0x4c, 0xd5, 0xfc, 0x19, 0xe9, 0xfd, 0x02, 0xf6, 0x6b, 0x42, 0x39, 0x30, 0x43, 0x05, + 0xd6, 0x2d, 0x07, 0x66, 0x95, 0x93, 0xf9, 0x07, 0x9a, 0x23, 0xc6, 0x85, 0xb7, 0x01, 0xe8, 0x09, + 0xe7, 0x85, 0xa5, 0xcf, 0x5f, 0x5a, 0x52, 0x14, 0x8f, 0x70, 0xae, 0x2c, 0x08, 0x66, 0xef, 0x47, + 0x30, 0xf6, 0x40, 0x59, 0xb2, 0x51, 0x21, 0xd9, 0x28, 0x4b, 0x9e, 0x41, 0x4b, 0xf5, 0x4b, 0x91, + 0x0b, 0xf5, 0x28, 0xd8, 0xa5, 0x85, 0x68, 0xb7, 0x4a, 0x94, 0x48, 0xc6, 0xbc, 0xa9, 0x8e, 0xde, + 0x05, 0x00, 0x00, 0xff, 0xff, 0x75, 0x38, 0xad, 0x84, 0xe4, 0x05, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto new file mode 100644 index 000000000..204865571 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/proto3_proto/proto3.proto @@ -0,0 +1,87 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2014 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +import "google/protobuf/any.proto"; +import "testdata/test.proto"; + +package proto3_proto; + +message Message { + enum Humour { + UNKNOWN = 0; + PUNS = 1; + SLAPSTICK = 2; + BILL_BAILEY = 3; + } + + string name = 1; + Humour hilarity = 2; + uint32 height_in_cm = 3; + bytes data = 4; + int64 result_count = 7; + bool true_scotsman = 8; + float score = 9; + + repeated uint64 key = 5; + repeated int32 short_key = 19; + Nested nested = 6; + repeated Humour r_funny = 16; + + map terrain = 10; + testdata.SubDefaults proto2_field = 11; + map proto2_value = 13; + + google.protobuf.Any anything = 14; + repeated google.protobuf.Any many_things = 15; + + Message submessage = 17; + repeated Message children = 18; +} + +message Nested { + string bunny = 1; + bool cute = 2; +} + +message MessageWithMap { + map byte_mapping = 1; +} + + +message IntMap { + map rtt = 1; +} + +message IntMaps { + repeated IntMap maps = 1; +} diff --git a/vendor/github.com/golang/protobuf/proto/proto3_test.go b/vendor/github.com/golang/protobuf/proto/proto3_test.go new file mode 100644 index 000000000..735837f2d --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/proto3_test.go @@ -0,0 +1,135 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2014 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package proto_test + +import ( + "testing" + + "github.com/golang/protobuf/proto" + pb "github.com/golang/protobuf/proto/proto3_proto" + tpb "github.com/golang/protobuf/proto/testdata" +) + +func TestProto3ZeroValues(t *testing.T) { + tests := []struct { + desc string + m proto.Message + }{ + {"zero message", &pb.Message{}}, + {"empty bytes field", &pb.Message{Data: []byte{}}}, + } + for _, test := range tests { + b, err := proto.Marshal(test.m) + if err != nil { + t.Errorf("%s: proto.Marshal: %v", test.desc, err) + continue + } + if len(b) > 0 { + t.Errorf("%s: Encoding is non-empty: %q", test.desc, b) + } + } +} + +func TestRoundTripProto3(t *testing.T) { + m := &pb.Message{ + Name: "David", // (2 | 1<<3): 0x0a 0x05 "David" + Hilarity: pb.Message_PUNS, // (0 | 2<<3): 0x10 0x01 + HeightInCm: 178, // (0 | 3<<3): 0x18 0xb2 0x01 + Data: []byte("roboto"), // (2 | 4<<3): 0x20 0x06 "roboto" + ResultCount: 47, // (0 | 7<<3): 0x38 0x2f + TrueScotsman: true, // (0 | 8<<3): 0x40 0x01 + Score: 8.1, // (5 | 9<<3): 0x4d <8.1> + + Key: []uint64{1, 0xdeadbeef}, + Nested: &pb.Nested{ + Bunny: "Monty", + }, + } + t.Logf(" m: %v", m) + + b, err := proto.Marshal(m) + if err != nil { + t.Fatalf("proto.Marshal: %v", err) + } + t.Logf(" b: %q", b) + + m2 := new(pb.Message) + if err := proto.Unmarshal(b, m2); err != nil { + t.Fatalf("proto.Unmarshal: %v", err) + } + t.Logf("m2: %v", m2) + + if !proto.Equal(m, m2) { + t.Errorf("proto.Equal returned false:\n m: %v\nm2: %v", m, m2) + } +} + +func TestGettersForBasicTypesExist(t *testing.T) { + var m pb.Message + if got := m.GetNested().GetBunny(); got != "" { + t.Errorf("m.GetNested().GetBunny() = %q, want empty string", got) + } + if got := m.GetNested().GetCute(); got { + t.Errorf("m.GetNested().GetCute() = %t, want false", got) + } +} + +func TestProto3SetDefaults(t *testing.T) { + in := &pb.Message{ + Terrain: map[string]*pb.Nested{ + "meadow": new(pb.Nested), + }, + Proto2Field: new(tpb.SubDefaults), + Proto2Value: map[string]*tpb.SubDefaults{ + "badlands": new(tpb.SubDefaults), + }, + } + + got := proto.Clone(in).(*pb.Message) + proto.SetDefaults(got) + + // There are no defaults in proto3. Everything should be the zero value, but + // we need to remember to set defaults for nested proto2 messages. + want := &pb.Message{ + Terrain: map[string]*pb.Nested{ + "meadow": new(pb.Nested), + }, + Proto2Field: &tpb.SubDefaults{N: proto.Int64(7)}, + Proto2Value: map[string]*tpb.SubDefaults{ + "badlands": &tpb.SubDefaults{N: proto.Int64(7)}, + }, + } + + if !proto.Equal(got, want) { + t.Errorf("with in = %v\nproto.SetDefaults(in) =>\ngot %v\nwant %v", in, got, want) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/size2_test.go b/vendor/github.com/golang/protobuf/proto/size2_test.go new file mode 100644 index 000000000..a2729c39a --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/size2_test.go @@ -0,0 +1,63 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2012 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +import ( + "testing" +) + +// This is a separate file and package from size_test.go because that one uses +// generated messages and thus may not be in package proto without having a circular +// dependency, whereas this file tests unexported details of size.go. + +func TestVarintSize(t *testing.T) { + // Check the edge cases carefully. + testCases := []struct { + n uint64 + size int + }{ + {0, 1}, + {1, 1}, + {127, 1}, + {128, 2}, + {16383, 2}, + {16384, 3}, + {1<<63 - 1, 9}, + {1 << 63, 10}, + } + for _, tc := range testCases { + size := sizeVarint(tc.n) + if size != tc.size { + t.Errorf("sizeVarint(%d) = %d, want %d", tc.n, size, tc.size) + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/size_test.go b/vendor/github.com/golang/protobuf/proto/size_test.go new file mode 100644 index 000000000..af1034dc7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/size_test.go @@ -0,0 +1,164 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2012 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "log" + "strings" + "testing" + + . "github.com/golang/protobuf/proto" + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + pb "github.com/golang/protobuf/proto/testdata" +) + +var messageWithExtension1 = &pb.MyMessage{Count: Int32(7)} + +// messageWithExtension2 is in equal_test.go. +var messageWithExtension3 = &pb.MyMessage{Count: Int32(8)} + +func init() { + if err := SetExtension(messageWithExtension1, pb.E_Ext_More, &pb.Ext{Data: String("Abbott")}); err != nil { + log.Panicf("SetExtension: %v", err) + } + if err := SetExtension(messageWithExtension3, pb.E_Ext_More, &pb.Ext{Data: String("Costello")}); err != nil { + log.Panicf("SetExtension: %v", err) + } + + // Force messageWithExtension3 to have the extension encoded. + Marshal(messageWithExtension3) + +} + +var SizeTests = []struct { + desc string + pb Message +}{ + {"empty", &pb.OtherMessage{}}, + // Basic types. + {"bool", &pb.Defaults{F_Bool: Bool(true)}}, + {"int32", &pb.Defaults{F_Int32: Int32(12)}}, + {"negative int32", &pb.Defaults{F_Int32: Int32(-1)}}, + {"small int64", &pb.Defaults{F_Int64: Int64(1)}}, + {"big int64", &pb.Defaults{F_Int64: Int64(1 << 20)}}, + {"negative int64", &pb.Defaults{F_Int64: Int64(-1)}}, + {"fixed32", &pb.Defaults{F_Fixed32: Uint32(71)}}, + {"fixed64", &pb.Defaults{F_Fixed64: Uint64(72)}}, + {"uint32", &pb.Defaults{F_Uint32: Uint32(123)}}, + {"uint64", &pb.Defaults{F_Uint64: Uint64(124)}}, + {"float", &pb.Defaults{F_Float: Float32(12.6)}}, + {"double", &pb.Defaults{F_Double: Float64(13.9)}}, + {"string", &pb.Defaults{F_String: String("niles")}}, + {"bytes", &pb.Defaults{F_Bytes: []byte("wowsa")}}, + {"bytes, empty", &pb.Defaults{F_Bytes: []byte{}}}, + {"sint32", &pb.Defaults{F_Sint32: Int32(65)}}, + {"sint64", &pb.Defaults{F_Sint64: Int64(67)}}, + {"enum", &pb.Defaults{F_Enum: pb.Defaults_BLUE.Enum()}}, + // Repeated. + {"empty repeated bool", &pb.MoreRepeated{Bools: []bool{}}}, + {"repeated bool", &pb.MoreRepeated{Bools: []bool{false, true, true, false}}}, + {"packed repeated bool", &pb.MoreRepeated{BoolsPacked: []bool{false, true, true, false, true, true, true}}}, + {"repeated int32", &pb.MoreRepeated{Ints: []int32{1, 12203, 1729, -1}}}, + {"repeated int32 packed", &pb.MoreRepeated{IntsPacked: []int32{1, 12203, 1729}}}, + {"repeated int64 packed", &pb.MoreRepeated{Int64SPacked: []int64{ + // Need enough large numbers to verify that the header is counting the number of bytes + // for the field, not the number of elements. + 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, + 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, 1 << 62, + }}}, + {"repeated string", &pb.MoreRepeated{Strings: []string{"r", "ken", "gri"}}}, + {"repeated fixed", &pb.MoreRepeated{Fixeds: []uint32{1, 2, 3, 4}}}, + // Nested. 
+ {"nested", &pb.OldMessage{Nested: &pb.OldMessage_Nested{Name: String("whatever")}}}, + {"group", &pb.GroupOld{G: &pb.GroupOld_G{X: Int32(12345)}}}, + // Other things. + {"unrecognized", &pb.MoreRepeated{XXX_unrecognized: []byte{13<<3 | 0, 4}}}, + {"extension (unencoded)", messageWithExtension1}, + {"extension (encoded)", messageWithExtension3}, + // proto3 message + {"proto3 empty", &proto3pb.Message{}}, + {"proto3 bool", &proto3pb.Message{TrueScotsman: true}}, + {"proto3 int64", &proto3pb.Message{ResultCount: 1}}, + {"proto3 uint32", &proto3pb.Message{HeightInCm: 123}}, + {"proto3 float", &proto3pb.Message{Score: 12.6}}, + {"proto3 string", &proto3pb.Message{Name: "Snezana"}}, + {"proto3 bytes", &proto3pb.Message{Data: []byte("wowsa")}}, + {"proto3 bytes, empty", &proto3pb.Message{Data: []byte{}}}, + {"proto3 enum", &proto3pb.Message{Hilarity: proto3pb.Message_PUNS}}, + {"proto3 map field with empty bytes", &proto3pb.MessageWithMap{ByteMapping: map[bool][]byte{false: []byte{}}}}, + + {"map field", &pb.MessageWithMap{NameMapping: map[int32]string{1: "Rob", 7: "Andrew"}}}, + {"map field with message", &pb.MessageWithMap{MsgMapping: map[int64]*pb.FloatingPoint{0x7001: &pb.FloatingPoint{F: Float64(2.0)}}}}, + {"map field with bytes", &pb.MessageWithMap{ByteMapping: map[bool][]byte{true: []byte("this time for sure")}}}, + {"map field with empty bytes", &pb.MessageWithMap{ByteMapping: map[bool][]byte{true: []byte{}}}}, + + {"map field with big entry", &pb.MessageWithMap{NameMapping: map[int32]string{8: strings.Repeat("x", 125)}}}, + {"map field with big key and val", &pb.MessageWithMap{StrToStr: map[string]string{strings.Repeat("x", 70): strings.Repeat("y", 70)}}}, + {"map field with big numeric key", &pb.MessageWithMap{NameMapping: map[int32]string{0xf00d: "om nom nom"}}}, + + {"oneof not set", &pb.Oneof{}}, + {"oneof bool", &pb.Oneof{Union: &pb.Oneof_F_Bool{true}}}, + {"oneof zero int32", &pb.Oneof{Union: &pb.Oneof_F_Int32{0}}}, + {"oneof big int32", &pb.Oneof{Union: &pb.Oneof_F_Int32{1 << 20}}}, + {"oneof int64", &pb.Oneof{Union: &pb.Oneof_F_Int64{42}}}, + {"oneof fixed32", &pb.Oneof{Union: &pb.Oneof_F_Fixed32{43}}}, + {"oneof fixed64", &pb.Oneof{Union: &pb.Oneof_F_Fixed64{44}}}, + {"oneof uint32", &pb.Oneof{Union: &pb.Oneof_F_Uint32{45}}}, + {"oneof uint64", &pb.Oneof{Union: &pb.Oneof_F_Uint64{46}}}, + {"oneof float", &pb.Oneof{Union: &pb.Oneof_F_Float{47.1}}}, + {"oneof double", &pb.Oneof{Union: &pb.Oneof_F_Double{48.9}}}, + {"oneof string", &pb.Oneof{Union: &pb.Oneof_F_String{"Rhythmic Fman"}}}, + {"oneof bytes", &pb.Oneof{Union: &pb.Oneof_F_Bytes{[]byte("let go")}}}, + {"oneof sint32", &pb.Oneof{Union: &pb.Oneof_F_Sint32{50}}}, + {"oneof sint64", &pb.Oneof{Union: &pb.Oneof_F_Sint64{51}}}, + {"oneof enum", &pb.Oneof{Union: &pb.Oneof_F_Enum{pb.MyMessage_BLUE}}}, + {"message for oneof", &pb.GoTestField{Label: String("k"), Type: String("v")}}, + {"oneof message", &pb.Oneof{Union: &pb.Oneof_F_Message{&pb.GoTestField{Label: String("k"), Type: String("v")}}}}, + {"oneof group", &pb.Oneof{Union: &pb.Oneof_FGroup{&pb.Oneof_F_Group{X: Int32(52)}}}}, + {"oneof largest tag", &pb.Oneof{Union: &pb.Oneof_F_Largest_Tag{1}}}, + {"multiple oneofs", &pb.Oneof{Union: &pb.Oneof_F_Int32{1}, Tormato: &pb.Oneof_Value{2}}}, +} + +func TestSize(t *testing.T) { + for _, tc := range SizeTests { + size := Size(tc.pb) + b, err := Marshal(tc.pb) + if err != nil { + t.Errorf("%v: Marshal failed: %v", tc.desc, err) + continue + } + if size != len(b) { + t.Errorf("%v: Size(%v) = %d, want %d", tc.desc, tc.pb, size, 
len(b)) + t.Logf("%v: bytes: %#v", tc.desc, b) + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/Makefile b/vendor/github.com/golang/protobuf/proto/testdata/Makefile new file mode 100644 index 000000000..fc288628a --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/testdata/Makefile @@ -0,0 +1,50 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +include ../../Make.protobuf + +all: regenerate + +regenerate: + rm -f test.pb.go + make test.pb.go + +# The following rules are just aids to development. Not needed for typical testing. + +diff: regenerate + git diff test.pb.go + +restore: + cp test.pb.go.golden test.pb.go + +preserve: + cp test.pb.go test.pb.go.golden diff --git a/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go b/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go new file mode 100644 index 000000000..7172d0e96 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/testdata/golden_test.go @@ -0,0 +1,86 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2012 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Verify that the compiler output for test.proto is unchanged. + +package testdata + +import ( + "crypto/sha1" + "fmt" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "testing" +) + +// sum returns in string form (for easy comparison) the SHA-1 hash of the named file. +func sum(t *testing.T, name string) string { + data, err := ioutil.ReadFile(name) + if err != nil { + t.Fatal(err) + } + t.Logf("sum(%q): length is %d", name, len(data)) + hash := sha1.New() + _, err = hash.Write(data) + if err != nil { + t.Fatal(err) + } + return fmt.Sprintf("% x", hash.Sum(nil)) +} + +func run(t *testing.T, name string, args ...string) { + cmd := exec.Command(name, args...) + cmd.Stdin = os.Stdin + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err := cmd.Run() + if err != nil { + t.Fatal(err) + } +} + +func TestGolden(t *testing.T) { + // Compute the original checksum. + goldenSum := sum(t, "test.pb.go") + // Run the proto compiler. + run(t, "protoc", "--go_out="+os.TempDir(), "test.proto") + newFile := filepath.Join(os.TempDir(), "test.pb.go") + defer os.Remove(newFile) + // Compute the new checksum. + newSum := sum(t, newFile) + // Verify + if newSum != goldenSum { + run(t, "diff", "-u", "test.pb.go", newFile) + t.Fatal("Code generated by protoc-gen-go has changed; update test.pb.go") + } +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go b/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go new file mode 100644 index 000000000..e980d1a03 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/testdata/test.pb.go @@ -0,0 +1,4147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: test.proto + +/* +Package testdata is a generated protocol buffer package. + +It is generated from these files: + test.proto + +It has these top-level messages: + GoEnum + GoTestField + GoTest + GoTestRequiredGroupField + GoSkipTest + NonPackedTest + PackedTest + MaxTag + OldMessage + NewMessage + InnerMessage + OtherMessage + RequiredInnerMessage + MyMessage + Ext + ComplexExtension + DefaultsMessage + MyMessageSet + Empty + MessageList + Strings + Defaults + SubDefaults + RepeatedEnum + MoreRepeated + GroupOld + GroupNew + FloatingPoint + MessageWithMap + Oneof + Communique +*/ +package testdata + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type FOO int32 + +const ( + FOO_FOO1 FOO = 1 +) + +var FOO_name = map[int32]string{ + 1: "FOO1", +} +var FOO_value = map[string]int32{ + "FOO1": 1, +} + +func (x FOO) Enum() *FOO { + p := new(FOO) + *p = x + return p +} +func (x FOO) String() string { + return proto.EnumName(FOO_name, int32(x)) +} +func (x *FOO) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FOO_value, data, "FOO") + if err != nil { + return err + } + *x = FOO(value) + return nil +} +func (FOO) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +// An enum, for completeness. +type GoTest_KIND int32 + +const ( + GoTest_VOID GoTest_KIND = 0 + // Basic types + GoTest_BOOL GoTest_KIND = 1 + GoTest_BYTES GoTest_KIND = 2 + GoTest_FINGERPRINT GoTest_KIND = 3 + GoTest_FLOAT GoTest_KIND = 4 + GoTest_INT GoTest_KIND = 5 + GoTest_STRING GoTest_KIND = 6 + GoTest_TIME GoTest_KIND = 7 + // Groupings + GoTest_TUPLE GoTest_KIND = 8 + GoTest_ARRAY GoTest_KIND = 9 + GoTest_MAP GoTest_KIND = 10 + // Table types + GoTest_TABLE GoTest_KIND = 11 + // Functions + GoTest_FUNCTION GoTest_KIND = 12 +) + +var GoTest_KIND_name = map[int32]string{ + 0: "VOID", + 1: "BOOL", + 2: "BYTES", + 3: "FINGERPRINT", + 4: "FLOAT", + 5: "INT", + 6: "STRING", + 7: "TIME", + 8: "TUPLE", + 9: "ARRAY", + 10: "MAP", + 11: "TABLE", + 12: "FUNCTION", +} +var GoTest_KIND_value = map[string]int32{ + "VOID": 0, + "BOOL": 1, + "BYTES": 2, + "FINGERPRINT": 3, + "FLOAT": 4, + "INT": 5, + "STRING": 6, + "TIME": 7, + "TUPLE": 8, + "ARRAY": 9, + "MAP": 10, + "TABLE": 11, + "FUNCTION": 12, +} + +func (x GoTest_KIND) Enum() *GoTest_KIND { + p := new(GoTest_KIND) + *p = x + return p +} +func (x GoTest_KIND) String() string { + return proto.EnumName(GoTest_KIND_name, int32(x)) +} +func (x *GoTest_KIND) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(GoTest_KIND_value, data, "GoTest_KIND") + if err != nil { + return err + } + *x = GoTest_KIND(value) + return nil +} +func (GoTest_KIND) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } + +type MyMessage_Color int32 + +const ( + MyMessage_RED MyMessage_Color = 0 + MyMessage_GREEN MyMessage_Color = 1 + MyMessage_BLUE MyMessage_Color = 2 +) + +var MyMessage_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var MyMessage_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x MyMessage_Color) Enum() *MyMessage_Color { + p := new(MyMessage_Color) + *p = x + return p +} +func (x MyMessage_Color) String() string { + return proto.EnumName(MyMessage_Color_name, int32(x)) +} +func (x *MyMessage_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(MyMessage_Color_value, data, "MyMessage_Color") + if err != nil { + return err + } + *x = MyMessage_Color(value) + return nil +} +func (MyMessage_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{13, 0} } + +type DefaultsMessage_DefaultsEnum int32 + +const ( + DefaultsMessage_ZERO DefaultsMessage_DefaultsEnum = 0 + DefaultsMessage_ONE DefaultsMessage_DefaultsEnum = 1 + DefaultsMessage_TWO DefaultsMessage_DefaultsEnum = 2 +) + +var DefaultsMessage_DefaultsEnum_name = map[int32]string{ + 0: "ZERO", + 1: "ONE", + 2: "TWO", +} +var DefaultsMessage_DefaultsEnum_value = map[string]int32{ + "ZERO": 0, + "ONE": 1, + "TWO": 2, +} + +func (x DefaultsMessage_DefaultsEnum) Enum() *DefaultsMessage_DefaultsEnum { + p := 
new(DefaultsMessage_DefaultsEnum) + *p = x + return p +} +func (x DefaultsMessage_DefaultsEnum) String() string { + return proto.EnumName(DefaultsMessage_DefaultsEnum_name, int32(x)) +} +func (x *DefaultsMessage_DefaultsEnum) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(DefaultsMessage_DefaultsEnum_value, data, "DefaultsMessage_DefaultsEnum") + if err != nil { + return err + } + *x = DefaultsMessage_DefaultsEnum(value) + return nil +} +func (DefaultsMessage_DefaultsEnum) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{16, 0} +} + +type Defaults_Color int32 + +const ( + Defaults_RED Defaults_Color = 0 + Defaults_GREEN Defaults_Color = 1 + Defaults_BLUE Defaults_Color = 2 +) + +var Defaults_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var Defaults_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x Defaults_Color) Enum() *Defaults_Color { + p := new(Defaults_Color) + *p = x + return p +} +func (x Defaults_Color) String() string { + return proto.EnumName(Defaults_Color_name, int32(x)) +} +func (x *Defaults_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Defaults_Color_value, data, "Defaults_Color") + if err != nil { + return err + } + *x = Defaults_Color(value) + return nil +} +func (Defaults_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{21, 0} } + +type RepeatedEnum_Color int32 + +const ( + RepeatedEnum_RED RepeatedEnum_Color = 1 +) + +var RepeatedEnum_Color_name = map[int32]string{ + 1: "RED", +} +var RepeatedEnum_Color_value = map[string]int32{ + "RED": 1, +} + +func (x RepeatedEnum_Color) Enum() *RepeatedEnum_Color { + p := new(RepeatedEnum_Color) + *p = x + return p +} +func (x RepeatedEnum_Color) String() string { + return proto.EnumName(RepeatedEnum_Color_name, int32(x)) +} +func (x *RepeatedEnum_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(RepeatedEnum_Color_value, data, "RepeatedEnum_Color") + if err != nil { + return err + } + *x = RepeatedEnum_Color(value) + return nil +} +func (RepeatedEnum_Color) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{23, 0} } + +type GoEnum struct { + Foo *FOO `protobuf:"varint,1,req,name=foo,enum=testdata.FOO" json:"foo,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoEnum) Reset() { *m = GoEnum{} } +func (m *GoEnum) String() string { return proto.CompactTextString(m) } +func (*GoEnum) ProtoMessage() {} +func (*GoEnum) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *GoEnum) GetFoo() FOO { + if m != nil && m.Foo != nil { + return *m.Foo + } + return FOO_FOO1 +} + +type GoTestField struct { + Label *string `protobuf:"bytes,1,req,name=Label" json:"Label,omitempty"` + Type *string `protobuf:"bytes,2,req,name=Type" json:"Type,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTestField) Reset() { *m = GoTestField{} } +func (m *GoTestField) String() string { return proto.CompactTextString(m) } +func (*GoTestField) ProtoMessage() {} +func (*GoTestField) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *GoTestField) GetLabel() string { + if m != nil && m.Label != nil { + return *m.Label + } + return "" +} + +func (m *GoTestField) GetType() string { + if m != nil && m.Type != nil { + return *m.Type + } + return "" +} + +type GoTest struct { + // Some typical parameters + Kind *GoTest_KIND `protobuf:"varint,1,req,name=Kind,enum=testdata.GoTest_KIND" 
json:"Kind,omitempty"` + Table *string `protobuf:"bytes,2,opt,name=Table" json:"Table,omitempty"` + Param *int32 `protobuf:"varint,3,opt,name=Param" json:"Param,omitempty"` + // Required, repeated and optional foreign fields. + RequiredField *GoTestField `protobuf:"bytes,4,req,name=RequiredField" json:"RequiredField,omitempty"` + RepeatedField []*GoTestField `protobuf:"bytes,5,rep,name=RepeatedField" json:"RepeatedField,omitempty"` + OptionalField *GoTestField `protobuf:"bytes,6,opt,name=OptionalField" json:"OptionalField,omitempty"` + // Required fields of all basic types + F_BoolRequired *bool `protobuf:"varint,10,req,name=F_Bool_required,json=FBoolRequired" json:"F_Bool_required,omitempty"` + F_Int32Required *int32 `protobuf:"varint,11,req,name=F_Int32_required,json=FInt32Required" json:"F_Int32_required,omitempty"` + F_Int64Required *int64 `protobuf:"varint,12,req,name=F_Int64_required,json=FInt64Required" json:"F_Int64_required,omitempty"` + F_Fixed32Required *uint32 `protobuf:"fixed32,13,req,name=F_Fixed32_required,json=FFixed32Required" json:"F_Fixed32_required,omitempty"` + F_Fixed64Required *uint64 `protobuf:"fixed64,14,req,name=F_Fixed64_required,json=FFixed64Required" json:"F_Fixed64_required,omitempty"` + F_Uint32Required *uint32 `protobuf:"varint,15,req,name=F_Uint32_required,json=FUint32Required" json:"F_Uint32_required,omitempty"` + F_Uint64Required *uint64 `protobuf:"varint,16,req,name=F_Uint64_required,json=FUint64Required" json:"F_Uint64_required,omitempty"` + F_FloatRequired *float32 `protobuf:"fixed32,17,req,name=F_Float_required,json=FFloatRequired" json:"F_Float_required,omitempty"` + F_DoubleRequired *float64 `protobuf:"fixed64,18,req,name=F_Double_required,json=FDoubleRequired" json:"F_Double_required,omitempty"` + F_StringRequired *string `protobuf:"bytes,19,req,name=F_String_required,json=FStringRequired" json:"F_String_required,omitempty"` + F_BytesRequired []byte `protobuf:"bytes,101,req,name=F_Bytes_required,json=FBytesRequired" json:"F_Bytes_required,omitempty"` + F_Sint32Required *int32 `protobuf:"zigzag32,102,req,name=F_Sint32_required,json=FSint32Required" json:"F_Sint32_required,omitempty"` + F_Sint64Required *int64 `protobuf:"zigzag64,103,req,name=F_Sint64_required,json=FSint64Required" json:"F_Sint64_required,omitempty"` + // Repeated fields of all basic types + F_BoolRepeated []bool `protobuf:"varint,20,rep,name=F_Bool_repeated,json=FBoolRepeated" json:"F_Bool_repeated,omitempty"` + F_Int32Repeated []int32 `protobuf:"varint,21,rep,name=F_Int32_repeated,json=FInt32Repeated" json:"F_Int32_repeated,omitempty"` + F_Int64Repeated []int64 `protobuf:"varint,22,rep,name=F_Int64_repeated,json=FInt64Repeated" json:"F_Int64_repeated,omitempty"` + F_Fixed32Repeated []uint32 `protobuf:"fixed32,23,rep,name=F_Fixed32_repeated,json=FFixed32Repeated" json:"F_Fixed32_repeated,omitempty"` + F_Fixed64Repeated []uint64 `protobuf:"fixed64,24,rep,name=F_Fixed64_repeated,json=FFixed64Repeated" json:"F_Fixed64_repeated,omitempty"` + F_Uint32Repeated []uint32 `protobuf:"varint,25,rep,name=F_Uint32_repeated,json=FUint32Repeated" json:"F_Uint32_repeated,omitempty"` + F_Uint64Repeated []uint64 `protobuf:"varint,26,rep,name=F_Uint64_repeated,json=FUint64Repeated" json:"F_Uint64_repeated,omitempty"` + F_FloatRepeated []float32 `protobuf:"fixed32,27,rep,name=F_Float_repeated,json=FFloatRepeated" json:"F_Float_repeated,omitempty"` + F_DoubleRepeated []float64 `protobuf:"fixed64,28,rep,name=F_Double_repeated,json=FDoubleRepeated" json:"F_Double_repeated,omitempty"` + F_StringRepeated 
[]string `protobuf:"bytes,29,rep,name=F_String_repeated,json=FStringRepeated" json:"F_String_repeated,omitempty"` + F_BytesRepeated [][]byte `protobuf:"bytes,201,rep,name=F_Bytes_repeated,json=FBytesRepeated" json:"F_Bytes_repeated,omitempty"` + F_Sint32Repeated []int32 `protobuf:"zigzag32,202,rep,name=F_Sint32_repeated,json=FSint32Repeated" json:"F_Sint32_repeated,omitempty"` + F_Sint64Repeated []int64 `protobuf:"zigzag64,203,rep,name=F_Sint64_repeated,json=FSint64Repeated" json:"F_Sint64_repeated,omitempty"` + // Optional fields of all basic types + F_BoolOptional *bool `protobuf:"varint,30,opt,name=F_Bool_optional,json=FBoolOptional" json:"F_Bool_optional,omitempty"` + F_Int32Optional *int32 `protobuf:"varint,31,opt,name=F_Int32_optional,json=FInt32Optional" json:"F_Int32_optional,omitempty"` + F_Int64Optional *int64 `protobuf:"varint,32,opt,name=F_Int64_optional,json=FInt64Optional" json:"F_Int64_optional,omitempty"` + F_Fixed32Optional *uint32 `protobuf:"fixed32,33,opt,name=F_Fixed32_optional,json=FFixed32Optional" json:"F_Fixed32_optional,omitempty"` + F_Fixed64Optional *uint64 `protobuf:"fixed64,34,opt,name=F_Fixed64_optional,json=FFixed64Optional" json:"F_Fixed64_optional,omitempty"` + F_Uint32Optional *uint32 `protobuf:"varint,35,opt,name=F_Uint32_optional,json=FUint32Optional" json:"F_Uint32_optional,omitempty"` + F_Uint64Optional *uint64 `protobuf:"varint,36,opt,name=F_Uint64_optional,json=FUint64Optional" json:"F_Uint64_optional,omitempty"` + F_FloatOptional *float32 `protobuf:"fixed32,37,opt,name=F_Float_optional,json=FFloatOptional" json:"F_Float_optional,omitempty"` + F_DoubleOptional *float64 `protobuf:"fixed64,38,opt,name=F_Double_optional,json=FDoubleOptional" json:"F_Double_optional,omitempty"` + F_StringOptional *string `protobuf:"bytes,39,opt,name=F_String_optional,json=FStringOptional" json:"F_String_optional,omitempty"` + F_BytesOptional []byte `protobuf:"bytes,301,opt,name=F_Bytes_optional,json=FBytesOptional" json:"F_Bytes_optional,omitempty"` + F_Sint32Optional *int32 `protobuf:"zigzag32,302,opt,name=F_Sint32_optional,json=FSint32Optional" json:"F_Sint32_optional,omitempty"` + F_Sint64Optional *int64 `protobuf:"zigzag64,303,opt,name=F_Sint64_optional,json=FSint64Optional" json:"F_Sint64_optional,omitempty"` + // Default-valued fields of all basic types + F_BoolDefaulted *bool `protobuf:"varint,40,opt,name=F_Bool_defaulted,json=FBoolDefaulted,def=1" json:"F_Bool_defaulted,omitempty"` + F_Int32Defaulted *int32 `protobuf:"varint,41,opt,name=F_Int32_defaulted,json=FInt32Defaulted,def=32" json:"F_Int32_defaulted,omitempty"` + F_Int64Defaulted *int64 `protobuf:"varint,42,opt,name=F_Int64_defaulted,json=FInt64Defaulted,def=64" json:"F_Int64_defaulted,omitempty"` + F_Fixed32Defaulted *uint32 `protobuf:"fixed32,43,opt,name=F_Fixed32_defaulted,json=FFixed32Defaulted,def=320" json:"F_Fixed32_defaulted,omitempty"` + F_Fixed64Defaulted *uint64 `protobuf:"fixed64,44,opt,name=F_Fixed64_defaulted,json=FFixed64Defaulted,def=640" json:"F_Fixed64_defaulted,omitempty"` + F_Uint32Defaulted *uint32 `protobuf:"varint,45,opt,name=F_Uint32_defaulted,json=FUint32Defaulted,def=3200" json:"F_Uint32_defaulted,omitempty"` + F_Uint64Defaulted *uint64 `protobuf:"varint,46,opt,name=F_Uint64_defaulted,json=FUint64Defaulted,def=6400" json:"F_Uint64_defaulted,omitempty"` + F_FloatDefaulted *float32 `protobuf:"fixed32,47,opt,name=F_Float_defaulted,json=FFloatDefaulted,def=314159" json:"F_Float_defaulted,omitempty"` + F_DoubleDefaulted *float64 
`protobuf:"fixed64,48,opt,name=F_Double_defaulted,json=FDoubleDefaulted,def=271828" json:"F_Double_defaulted,omitempty"` + F_StringDefaulted *string `protobuf:"bytes,49,opt,name=F_String_defaulted,json=FStringDefaulted,def=hello, \"world!\"\n" json:"F_String_defaulted,omitempty"` + F_BytesDefaulted []byte `protobuf:"bytes,401,opt,name=F_Bytes_defaulted,json=FBytesDefaulted,def=Bignose" json:"F_Bytes_defaulted,omitempty"` + F_Sint32Defaulted *int32 `protobuf:"zigzag32,402,opt,name=F_Sint32_defaulted,json=FSint32Defaulted,def=-32" json:"F_Sint32_defaulted,omitempty"` + F_Sint64Defaulted *int64 `protobuf:"zigzag64,403,opt,name=F_Sint64_defaulted,json=FSint64Defaulted,def=-64" json:"F_Sint64_defaulted,omitempty"` + // Packed repeated fields (no string or bytes). + F_BoolRepeatedPacked []bool `protobuf:"varint,50,rep,packed,name=F_Bool_repeated_packed,json=FBoolRepeatedPacked" json:"F_Bool_repeated_packed,omitempty"` + F_Int32RepeatedPacked []int32 `protobuf:"varint,51,rep,packed,name=F_Int32_repeated_packed,json=FInt32RepeatedPacked" json:"F_Int32_repeated_packed,omitempty"` + F_Int64RepeatedPacked []int64 `protobuf:"varint,52,rep,packed,name=F_Int64_repeated_packed,json=FInt64RepeatedPacked" json:"F_Int64_repeated_packed,omitempty"` + F_Fixed32RepeatedPacked []uint32 `protobuf:"fixed32,53,rep,packed,name=F_Fixed32_repeated_packed,json=FFixed32RepeatedPacked" json:"F_Fixed32_repeated_packed,omitempty"` + F_Fixed64RepeatedPacked []uint64 `protobuf:"fixed64,54,rep,packed,name=F_Fixed64_repeated_packed,json=FFixed64RepeatedPacked" json:"F_Fixed64_repeated_packed,omitempty"` + F_Uint32RepeatedPacked []uint32 `protobuf:"varint,55,rep,packed,name=F_Uint32_repeated_packed,json=FUint32RepeatedPacked" json:"F_Uint32_repeated_packed,omitempty"` + F_Uint64RepeatedPacked []uint64 `protobuf:"varint,56,rep,packed,name=F_Uint64_repeated_packed,json=FUint64RepeatedPacked" json:"F_Uint64_repeated_packed,omitempty"` + F_FloatRepeatedPacked []float32 `protobuf:"fixed32,57,rep,packed,name=F_Float_repeated_packed,json=FFloatRepeatedPacked" json:"F_Float_repeated_packed,omitempty"` + F_DoubleRepeatedPacked []float64 `protobuf:"fixed64,58,rep,packed,name=F_Double_repeated_packed,json=FDoubleRepeatedPacked" json:"F_Double_repeated_packed,omitempty"` + F_Sint32RepeatedPacked []int32 `protobuf:"zigzag32,502,rep,packed,name=F_Sint32_repeated_packed,json=FSint32RepeatedPacked" json:"F_Sint32_repeated_packed,omitempty"` + F_Sint64RepeatedPacked []int64 `protobuf:"zigzag64,503,rep,packed,name=F_Sint64_repeated_packed,json=FSint64RepeatedPacked" json:"F_Sint64_repeated_packed,omitempty"` + Requiredgroup *GoTest_RequiredGroup `protobuf:"group,70,req,name=RequiredGroup,json=requiredgroup" json:"requiredgroup,omitempty"` + Repeatedgroup []*GoTest_RepeatedGroup `protobuf:"group,80,rep,name=RepeatedGroup,json=repeatedgroup" json:"repeatedgroup,omitempty"` + Optionalgroup *GoTest_OptionalGroup `protobuf:"group,90,opt,name=OptionalGroup,json=optionalgroup" json:"optionalgroup,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTest) Reset() { *m = GoTest{} } +func (m *GoTest) String() string { return proto.CompactTextString(m) } +func (*GoTest) ProtoMessage() {} +func (*GoTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +const Default_GoTest_F_BoolDefaulted bool = true +const Default_GoTest_F_Int32Defaulted int32 = 32 +const Default_GoTest_F_Int64Defaulted int64 = 64 +const Default_GoTest_F_Fixed32Defaulted uint32 = 320 +const Default_GoTest_F_Fixed64Defaulted uint64 = 640 +const 
Default_GoTest_F_Uint32Defaulted uint32 = 3200 +const Default_GoTest_F_Uint64Defaulted uint64 = 6400 +const Default_GoTest_F_FloatDefaulted float32 = 314159 +const Default_GoTest_F_DoubleDefaulted float64 = 271828 +const Default_GoTest_F_StringDefaulted string = "hello, \"world!\"\n" + +var Default_GoTest_F_BytesDefaulted []byte = []byte("Bignose") + +const Default_GoTest_F_Sint32Defaulted int32 = -32 +const Default_GoTest_F_Sint64Defaulted int64 = -64 + +func (m *GoTest) GetKind() GoTest_KIND { + if m != nil && m.Kind != nil { + return *m.Kind + } + return GoTest_VOID +} + +func (m *GoTest) GetTable() string { + if m != nil && m.Table != nil { + return *m.Table + } + return "" +} + +func (m *GoTest) GetParam() int32 { + if m != nil && m.Param != nil { + return *m.Param + } + return 0 +} + +func (m *GoTest) GetRequiredField() *GoTestField { + if m != nil { + return m.RequiredField + } + return nil +} + +func (m *GoTest) GetRepeatedField() []*GoTestField { + if m != nil { + return m.RepeatedField + } + return nil +} + +func (m *GoTest) GetOptionalField() *GoTestField { + if m != nil { + return m.OptionalField + } + return nil +} + +func (m *GoTest) GetF_BoolRequired() bool { + if m != nil && m.F_BoolRequired != nil { + return *m.F_BoolRequired + } + return false +} + +func (m *GoTest) GetF_Int32Required() int32 { + if m != nil && m.F_Int32Required != nil { + return *m.F_Int32Required + } + return 0 +} + +func (m *GoTest) GetF_Int64Required() int64 { + if m != nil && m.F_Int64Required != nil { + return *m.F_Int64Required + } + return 0 +} + +func (m *GoTest) GetF_Fixed32Required() uint32 { + if m != nil && m.F_Fixed32Required != nil { + return *m.F_Fixed32Required + } + return 0 +} + +func (m *GoTest) GetF_Fixed64Required() uint64 { + if m != nil && m.F_Fixed64Required != nil { + return *m.F_Fixed64Required + } + return 0 +} + +func (m *GoTest) GetF_Uint32Required() uint32 { + if m != nil && m.F_Uint32Required != nil { + return *m.F_Uint32Required + } + return 0 +} + +func (m *GoTest) GetF_Uint64Required() uint64 { + if m != nil && m.F_Uint64Required != nil { + return *m.F_Uint64Required + } + return 0 +} + +func (m *GoTest) GetF_FloatRequired() float32 { + if m != nil && m.F_FloatRequired != nil { + return *m.F_FloatRequired + } + return 0 +} + +func (m *GoTest) GetF_DoubleRequired() float64 { + if m != nil && m.F_DoubleRequired != nil { + return *m.F_DoubleRequired + } + return 0 +} + +func (m *GoTest) GetF_StringRequired() string { + if m != nil && m.F_StringRequired != nil { + return *m.F_StringRequired + } + return "" +} + +func (m *GoTest) GetF_BytesRequired() []byte { + if m != nil { + return m.F_BytesRequired + } + return nil +} + +func (m *GoTest) GetF_Sint32Required() int32 { + if m != nil && m.F_Sint32Required != nil { + return *m.F_Sint32Required + } + return 0 +} + +func (m *GoTest) GetF_Sint64Required() int64 { + if m != nil && m.F_Sint64Required != nil { + return *m.F_Sint64Required + } + return 0 +} + +func (m *GoTest) GetF_BoolRepeated() []bool { + if m != nil { + return m.F_BoolRepeated + } + return nil +} + +func (m *GoTest) GetF_Int32Repeated() []int32 { + if m != nil { + return m.F_Int32Repeated + } + return nil +} + +func (m *GoTest) GetF_Int64Repeated() []int64 { + if m != nil { + return m.F_Int64Repeated + } + return nil +} + +func (m *GoTest) GetF_Fixed32Repeated() []uint32 { + if m != nil { + return m.F_Fixed32Repeated + } + return nil +} + +func (m *GoTest) GetF_Fixed64Repeated() []uint64 { + if m != nil { + return m.F_Fixed64Repeated + } + return nil +} + +func (m 
*GoTest) GetF_Uint32Repeated() []uint32 { + if m != nil { + return m.F_Uint32Repeated + } + return nil +} + +func (m *GoTest) GetF_Uint64Repeated() []uint64 { + if m != nil { + return m.F_Uint64Repeated + } + return nil +} + +func (m *GoTest) GetF_FloatRepeated() []float32 { + if m != nil { + return m.F_FloatRepeated + } + return nil +} + +func (m *GoTest) GetF_DoubleRepeated() []float64 { + if m != nil { + return m.F_DoubleRepeated + } + return nil +} + +func (m *GoTest) GetF_StringRepeated() []string { + if m != nil { + return m.F_StringRepeated + } + return nil +} + +func (m *GoTest) GetF_BytesRepeated() [][]byte { + if m != nil { + return m.F_BytesRepeated + } + return nil +} + +func (m *GoTest) GetF_Sint32Repeated() []int32 { + if m != nil { + return m.F_Sint32Repeated + } + return nil +} + +func (m *GoTest) GetF_Sint64Repeated() []int64 { + if m != nil { + return m.F_Sint64Repeated + } + return nil +} + +func (m *GoTest) GetF_BoolOptional() bool { + if m != nil && m.F_BoolOptional != nil { + return *m.F_BoolOptional + } + return false +} + +func (m *GoTest) GetF_Int32Optional() int32 { + if m != nil && m.F_Int32Optional != nil { + return *m.F_Int32Optional + } + return 0 +} + +func (m *GoTest) GetF_Int64Optional() int64 { + if m != nil && m.F_Int64Optional != nil { + return *m.F_Int64Optional + } + return 0 +} + +func (m *GoTest) GetF_Fixed32Optional() uint32 { + if m != nil && m.F_Fixed32Optional != nil { + return *m.F_Fixed32Optional + } + return 0 +} + +func (m *GoTest) GetF_Fixed64Optional() uint64 { + if m != nil && m.F_Fixed64Optional != nil { + return *m.F_Fixed64Optional + } + return 0 +} + +func (m *GoTest) GetF_Uint32Optional() uint32 { + if m != nil && m.F_Uint32Optional != nil { + return *m.F_Uint32Optional + } + return 0 +} + +func (m *GoTest) GetF_Uint64Optional() uint64 { + if m != nil && m.F_Uint64Optional != nil { + return *m.F_Uint64Optional + } + return 0 +} + +func (m *GoTest) GetF_FloatOptional() float32 { + if m != nil && m.F_FloatOptional != nil { + return *m.F_FloatOptional + } + return 0 +} + +func (m *GoTest) GetF_DoubleOptional() float64 { + if m != nil && m.F_DoubleOptional != nil { + return *m.F_DoubleOptional + } + return 0 +} + +func (m *GoTest) GetF_StringOptional() string { + if m != nil && m.F_StringOptional != nil { + return *m.F_StringOptional + } + return "" +} + +func (m *GoTest) GetF_BytesOptional() []byte { + if m != nil { + return m.F_BytesOptional + } + return nil +} + +func (m *GoTest) GetF_Sint32Optional() int32 { + if m != nil && m.F_Sint32Optional != nil { + return *m.F_Sint32Optional + } + return 0 +} + +func (m *GoTest) GetF_Sint64Optional() int64 { + if m != nil && m.F_Sint64Optional != nil { + return *m.F_Sint64Optional + } + return 0 +} + +func (m *GoTest) GetF_BoolDefaulted() bool { + if m != nil && m.F_BoolDefaulted != nil { + return *m.F_BoolDefaulted + } + return Default_GoTest_F_BoolDefaulted +} + +func (m *GoTest) GetF_Int32Defaulted() int32 { + if m != nil && m.F_Int32Defaulted != nil { + return *m.F_Int32Defaulted + } + return Default_GoTest_F_Int32Defaulted +} + +func (m *GoTest) GetF_Int64Defaulted() int64 { + if m != nil && m.F_Int64Defaulted != nil { + return *m.F_Int64Defaulted + } + return Default_GoTest_F_Int64Defaulted +} + +func (m *GoTest) GetF_Fixed32Defaulted() uint32 { + if m != nil && m.F_Fixed32Defaulted != nil { + return *m.F_Fixed32Defaulted + } + return Default_GoTest_F_Fixed32Defaulted +} + +func (m *GoTest) GetF_Fixed64Defaulted() uint64 { + if m != nil && m.F_Fixed64Defaulted != nil { + return 
*m.F_Fixed64Defaulted + } + return Default_GoTest_F_Fixed64Defaulted +} + +func (m *GoTest) GetF_Uint32Defaulted() uint32 { + if m != nil && m.F_Uint32Defaulted != nil { + return *m.F_Uint32Defaulted + } + return Default_GoTest_F_Uint32Defaulted +} + +func (m *GoTest) GetF_Uint64Defaulted() uint64 { + if m != nil && m.F_Uint64Defaulted != nil { + return *m.F_Uint64Defaulted + } + return Default_GoTest_F_Uint64Defaulted +} + +func (m *GoTest) GetF_FloatDefaulted() float32 { + if m != nil && m.F_FloatDefaulted != nil { + return *m.F_FloatDefaulted + } + return Default_GoTest_F_FloatDefaulted +} + +func (m *GoTest) GetF_DoubleDefaulted() float64 { + if m != nil && m.F_DoubleDefaulted != nil { + return *m.F_DoubleDefaulted + } + return Default_GoTest_F_DoubleDefaulted +} + +func (m *GoTest) GetF_StringDefaulted() string { + if m != nil && m.F_StringDefaulted != nil { + return *m.F_StringDefaulted + } + return Default_GoTest_F_StringDefaulted +} + +func (m *GoTest) GetF_BytesDefaulted() []byte { + if m != nil && m.F_BytesDefaulted != nil { + return m.F_BytesDefaulted + } + return append([]byte(nil), Default_GoTest_F_BytesDefaulted...) +} + +func (m *GoTest) GetF_Sint32Defaulted() int32 { + if m != nil && m.F_Sint32Defaulted != nil { + return *m.F_Sint32Defaulted + } + return Default_GoTest_F_Sint32Defaulted +} + +func (m *GoTest) GetF_Sint64Defaulted() int64 { + if m != nil && m.F_Sint64Defaulted != nil { + return *m.F_Sint64Defaulted + } + return Default_GoTest_F_Sint64Defaulted +} + +func (m *GoTest) GetF_BoolRepeatedPacked() []bool { + if m != nil { + return m.F_BoolRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Int32RepeatedPacked() []int32 { + if m != nil { + return m.F_Int32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Int64RepeatedPacked() []int64 { + if m != nil { + return m.F_Int64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Fixed32RepeatedPacked() []uint32 { + if m != nil { + return m.F_Fixed32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Fixed64RepeatedPacked() []uint64 { + if m != nil { + return m.F_Fixed64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Uint32RepeatedPacked() []uint32 { + if m != nil { + return m.F_Uint32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Uint64RepeatedPacked() []uint64 { + if m != nil { + return m.F_Uint64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_FloatRepeatedPacked() []float32 { + if m != nil { + return m.F_FloatRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_DoubleRepeatedPacked() []float64 { + if m != nil { + return m.F_DoubleRepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sint32RepeatedPacked() []int32 { + if m != nil { + return m.F_Sint32RepeatedPacked + } + return nil +} + +func (m *GoTest) GetF_Sint64RepeatedPacked() []int64 { + if m != nil { + return m.F_Sint64RepeatedPacked + } + return nil +} + +func (m *GoTest) GetRequiredgroup() *GoTest_RequiredGroup { + if m != nil { + return m.Requiredgroup + } + return nil +} + +func (m *GoTest) GetRepeatedgroup() []*GoTest_RepeatedGroup { + if m != nil { + return m.Repeatedgroup + } + return nil +} + +func (m *GoTest) GetOptionalgroup() *GoTest_OptionalGroup { + if m != nil { + return m.Optionalgroup + } + return nil +} + +// Required, repeated, and optional groups. 
+type GoTest_RequiredGroup struct { + RequiredField *string `protobuf:"bytes,71,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTest_RequiredGroup) Reset() { *m = GoTest_RequiredGroup{} } +func (m *GoTest_RequiredGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_RequiredGroup) ProtoMessage() {} +func (*GoTest_RequiredGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } + +func (m *GoTest_RequiredGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +type GoTest_RepeatedGroup struct { + RequiredField *string `protobuf:"bytes,81,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTest_RepeatedGroup) Reset() { *m = GoTest_RepeatedGroup{} } +func (m *GoTest_RepeatedGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_RepeatedGroup) ProtoMessage() {} +func (*GoTest_RepeatedGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 1} } + +func (m *GoTest_RepeatedGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +type GoTest_OptionalGroup struct { + RequiredField *string `protobuf:"bytes,91,req,name=RequiredField" json:"RequiredField,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTest_OptionalGroup) Reset() { *m = GoTest_OptionalGroup{} } +func (m *GoTest_OptionalGroup) String() string { return proto.CompactTextString(m) } +func (*GoTest_OptionalGroup) ProtoMessage() {} +func (*GoTest_OptionalGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 2} } + +func (m *GoTest_OptionalGroup) GetRequiredField() string { + if m != nil && m.RequiredField != nil { + return *m.RequiredField + } + return "" +} + +// For testing a group containing a required field. +type GoTestRequiredGroupField struct { + Group *GoTestRequiredGroupField_Group `protobuf:"group,1,req,name=Group,json=group" json:"group,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTestRequiredGroupField) Reset() { *m = GoTestRequiredGroupField{} } +func (m *GoTestRequiredGroupField) String() string { return proto.CompactTextString(m) } +func (*GoTestRequiredGroupField) ProtoMessage() {} +func (*GoTestRequiredGroupField) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *GoTestRequiredGroupField) GetGroup() *GoTestRequiredGroupField_Group { + if m != nil { + return m.Group + } + return nil +} + +type GoTestRequiredGroupField_Group struct { + Field *int32 `protobuf:"varint,2,req,name=Field" json:"Field,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoTestRequiredGroupField_Group) Reset() { *m = GoTestRequiredGroupField_Group{} } +func (m *GoTestRequiredGroupField_Group) String() string { return proto.CompactTextString(m) } +func (*GoTestRequiredGroupField_Group) ProtoMessage() {} +func (*GoTestRequiredGroupField_Group) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{3, 0} +} + +func (m *GoTestRequiredGroupField_Group) GetField() int32 { + if m != nil && m.Field != nil { + return *m.Field + } + return 0 +} + +// For testing skipping of unrecognized fields. +// Numbers are all big, larger than tag numbers in GoTestField, +// the message used in the corresponding test. 
+type GoSkipTest struct { + SkipInt32 *int32 `protobuf:"varint,11,req,name=skip_int32,json=skipInt32" json:"skip_int32,omitempty"` + SkipFixed32 *uint32 `protobuf:"fixed32,12,req,name=skip_fixed32,json=skipFixed32" json:"skip_fixed32,omitempty"` + SkipFixed64 *uint64 `protobuf:"fixed64,13,req,name=skip_fixed64,json=skipFixed64" json:"skip_fixed64,omitempty"` + SkipString *string `protobuf:"bytes,14,req,name=skip_string,json=skipString" json:"skip_string,omitempty"` + Skipgroup *GoSkipTest_SkipGroup `protobuf:"group,15,req,name=SkipGroup,json=skipgroup" json:"skipgroup,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoSkipTest) Reset() { *m = GoSkipTest{} } +func (m *GoSkipTest) String() string { return proto.CompactTextString(m) } +func (*GoSkipTest) ProtoMessage() {} +func (*GoSkipTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *GoSkipTest) GetSkipInt32() int32 { + if m != nil && m.SkipInt32 != nil { + return *m.SkipInt32 + } + return 0 +} + +func (m *GoSkipTest) GetSkipFixed32() uint32 { + if m != nil && m.SkipFixed32 != nil { + return *m.SkipFixed32 + } + return 0 +} + +func (m *GoSkipTest) GetSkipFixed64() uint64 { + if m != nil && m.SkipFixed64 != nil { + return *m.SkipFixed64 + } + return 0 +} + +func (m *GoSkipTest) GetSkipString() string { + if m != nil && m.SkipString != nil { + return *m.SkipString + } + return "" +} + +func (m *GoSkipTest) GetSkipgroup() *GoSkipTest_SkipGroup { + if m != nil { + return m.Skipgroup + } + return nil +} + +type GoSkipTest_SkipGroup struct { + GroupInt32 *int32 `protobuf:"varint,16,req,name=group_int32,json=groupInt32" json:"group_int32,omitempty"` + GroupString *string `protobuf:"bytes,17,req,name=group_string,json=groupString" json:"group_string,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GoSkipTest_SkipGroup) Reset() { *m = GoSkipTest_SkipGroup{} } +func (m *GoSkipTest_SkipGroup) String() string { return proto.CompactTextString(m) } +func (*GoSkipTest_SkipGroup) ProtoMessage() {} +func (*GoSkipTest_SkipGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} } + +func (m *GoSkipTest_SkipGroup) GetGroupInt32() int32 { + if m != nil && m.GroupInt32 != nil { + return *m.GroupInt32 + } + return 0 +} + +func (m *GoSkipTest_SkipGroup) GetGroupString() string { + if m != nil && m.GroupString != nil { + return *m.GroupString + } + return "" +} + +// For testing packed/non-packed decoder switching. +// A serialized instance of one should be deserializable as the other. 
+type NonPackedTest struct {
+	A []int32 `protobuf:"varint,1,rep,name=a" json:"a,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *NonPackedTest) Reset() { *m = NonPackedTest{} }
+func (m *NonPackedTest) String() string { return proto.CompactTextString(m) }
+func (*NonPackedTest) ProtoMessage() {}
+func (*NonPackedTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
+
+func (m *NonPackedTest) GetA() []int32 {
+	if m != nil {
+		return m.A
+	}
+	return nil
+}
+
+type PackedTest struct {
+	B []int32 `protobuf:"varint,1,rep,packed,name=b" json:"b,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *PackedTest) Reset() { *m = PackedTest{} }
+func (m *PackedTest) String() string { return proto.CompactTextString(m) }
+func (*PackedTest) ProtoMessage() {}
+func (*PackedTest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
+
+func (m *PackedTest) GetB() []int32 {
+	if m != nil {
+		return m.B
+	}
+	return nil
+}
+
+type MaxTag struct {
+	// Maximum possible tag number.
+	LastField *string `protobuf:"bytes,536870911,opt,name=last_field,json=lastField" json:"last_field,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *MaxTag) Reset() { *m = MaxTag{} }
+func (m *MaxTag) String() string { return proto.CompactTextString(m) }
+func (*MaxTag) ProtoMessage() {}
+func (*MaxTag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
+
+func (m *MaxTag) GetLastField() string {
+	if m != nil && m.LastField != nil {
+		return *m.LastField
+	}
+	return ""
+}
+
+type OldMessage struct {
+	Nested *OldMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"`
+	Num *int32 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *OldMessage) Reset() { *m = OldMessage{} }
+func (m *OldMessage) String() string { return proto.CompactTextString(m) }
+func (*OldMessage) ProtoMessage() {}
+func (*OldMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
+
+func (m *OldMessage) GetNested() *OldMessage_Nested {
+	if m != nil {
+		return m.Nested
+	}
+	return nil
+}
+
+func (m *OldMessage) GetNum() int32 {
+	if m != nil && m.Num != nil {
+		return *m.Num
+	}
+	return 0
+}
+
+type OldMessage_Nested struct {
+	Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	XXX_unrecognized []byte `json:"-"`
+}
+
+func (m *OldMessage_Nested) Reset() { *m = OldMessage_Nested{} }
+func (m *OldMessage_Nested) String() string { return proto.CompactTextString(m) }
+func (*OldMessage_Nested) ProtoMessage() {}
+func (*OldMessage_Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8, 0} }
+
+func (m *OldMessage_Nested) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+// NewMessage is wire compatible with OldMessage;
+// imagine it as a future version.
+type NewMessage struct {
+	Nested *NewMessage_Nested `protobuf:"bytes,1,opt,name=nested" json:"nested,omitempty"`
+	// This is an int32 in OldMessage.
+ Num *int64 `protobuf:"varint,2,opt,name=num" json:"num,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *NewMessage) Reset() { *m = NewMessage{} } +func (m *NewMessage) String() string { return proto.CompactTextString(m) } +func (*NewMessage) ProtoMessage() {} +func (*NewMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +func (m *NewMessage) GetNested() *NewMessage_Nested { + if m != nil { + return m.Nested + } + return nil +} + +func (m *NewMessage) GetNum() int64 { + if m != nil && m.Num != nil { + return *m.Num + } + return 0 +} + +type NewMessage_Nested struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + FoodGroup *string `protobuf:"bytes,2,opt,name=food_group,json=foodGroup" json:"food_group,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *NewMessage_Nested) Reset() { *m = NewMessage_Nested{} } +func (m *NewMessage_Nested) String() string { return proto.CompactTextString(m) } +func (*NewMessage_Nested) ProtoMessage() {} +func (*NewMessage_Nested) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9, 0} } + +func (m *NewMessage_Nested) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *NewMessage_Nested) GetFoodGroup() string { + if m != nil && m.FoodGroup != nil { + return *m.FoodGroup + } + return "" +} + +type InnerMessage struct { + Host *string `protobuf:"bytes,1,req,name=host" json:"host,omitempty"` + Port *int32 `protobuf:"varint,2,opt,name=port,def=4000" json:"port,omitempty"` + Connected *bool `protobuf:"varint,3,opt,name=connected" json:"connected,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *InnerMessage) Reset() { *m = InnerMessage{} } +func (m *InnerMessage) String() string { return proto.CompactTextString(m) } +func (*InnerMessage) ProtoMessage() {} +func (*InnerMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +const Default_InnerMessage_Port int32 = 4000 + +func (m *InnerMessage) GetHost() string { + if m != nil && m.Host != nil { + return *m.Host + } + return "" +} + +func (m *InnerMessage) GetPort() int32 { + if m != nil && m.Port != nil { + return *m.Port + } + return Default_InnerMessage_Port +} + +func (m *InnerMessage) GetConnected() bool { + if m != nil && m.Connected != nil { + return *m.Connected + } + return false +} + +type OtherMessage struct { + Key *int64 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` + Weight *float32 `protobuf:"fixed32,3,opt,name=weight" json:"weight,omitempty"` + Inner *InnerMessage `protobuf:"bytes,4,opt,name=inner" json:"inner,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OtherMessage) Reset() { *m = OtherMessage{} } +func (m *OtherMessage) String() string { return proto.CompactTextString(m) } +func (*OtherMessage) ProtoMessage() {} +func (*OtherMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +var extRange_OtherMessage = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*OtherMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OtherMessage +} + +func (m *OtherMessage) GetKey() int64 { + if m != nil && m.Key != nil { + return *m.Key + } + return 0 +} + +func (m *OtherMessage) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func (m *OtherMessage) GetWeight() float32 { + if m != nil && m.Weight != nil { + return *m.Weight 
+ } + return 0 +} + +func (m *OtherMessage) GetInner() *InnerMessage { + if m != nil { + return m.Inner + } + return nil +} + +type RequiredInnerMessage struct { + LeoFinallyWonAnOscar *InnerMessage `protobuf:"bytes,1,req,name=leo_finally_won_an_oscar,json=leoFinallyWonAnOscar" json:"leo_finally_won_an_oscar,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *RequiredInnerMessage) Reset() { *m = RequiredInnerMessage{} } +func (m *RequiredInnerMessage) String() string { return proto.CompactTextString(m) } +func (*RequiredInnerMessage) ProtoMessage() {} +func (*RequiredInnerMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +func (m *RequiredInnerMessage) GetLeoFinallyWonAnOscar() *InnerMessage { + if m != nil { + return m.LeoFinallyWonAnOscar + } + return nil +} + +type MyMessage struct { + Count *int32 `protobuf:"varint,1,req,name=count" json:"count,omitempty"` + Name *string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + Quote *string `protobuf:"bytes,3,opt,name=quote" json:"quote,omitempty"` + Pet []string `protobuf:"bytes,4,rep,name=pet" json:"pet,omitempty"` + Inner *InnerMessage `protobuf:"bytes,5,opt,name=inner" json:"inner,omitempty"` + Others []*OtherMessage `protobuf:"bytes,6,rep,name=others" json:"others,omitempty"` + WeMustGoDeeper *RequiredInnerMessage `protobuf:"bytes,13,opt,name=we_must_go_deeper,json=weMustGoDeeper" json:"we_must_go_deeper,omitempty"` + RepInner []*InnerMessage `protobuf:"bytes,12,rep,name=rep_inner,json=repInner" json:"rep_inner,omitempty"` + Bikeshed *MyMessage_Color `protobuf:"varint,7,opt,name=bikeshed,enum=testdata.MyMessage_Color" json:"bikeshed,omitempty"` + Somegroup *MyMessage_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` + // This field becomes [][]byte in the generated code. 
+ RepBytes [][]byte `protobuf:"bytes,10,rep,name=rep_bytes,json=repBytes" json:"rep_bytes,omitempty"` + Bigfloat *float64 `protobuf:"fixed64,11,opt,name=bigfloat" json:"bigfloat,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MyMessage) Reset() { *m = MyMessage{} } +func (m *MyMessage) String() string { return proto.CompactTextString(m) } +func (*MyMessage) ProtoMessage() {} +func (*MyMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +var extRange_MyMessage = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*MyMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MyMessage +} + +func (m *MyMessage) GetCount() int32 { + if m != nil && m.Count != nil { + return *m.Count + } + return 0 +} + +func (m *MyMessage) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MyMessage) GetQuote() string { + if m != nil && m.Quote != nil { + return *m.Quote + } + return "" +} + +func (m *MyMessage) GetPet() []string { + if m != nil { + return m.Pet + } + return nil +} + +func (m *MyMessage) GetInner() *InnerMessage { + if m != nil { + return m.Inner + } + return nil +} + +func (m *MyMessage) GetOthers() []*OtherMessage { + if m != nil { + return m.Others + } + return nil +} + +func (m *MyMessage) GetWeMustGoDeeper() *RequiredInnerMessage { + if m != nil { + return m.WeMustGoDeeper + } + return nil +} + +func (m *MyMessage) GetRepInner() []*InnerMessage { + if m != nil { + return m.RepInner + } + return nil +} + +func (m *MyMessage) GetBikeshed() MyMessage_Color { + if m != nil && m.Bikeshed != nil { + return *m.Bikeshed + } + return MyMessage_RED +} + +func (m *MyMessage) GetSomegroup() *MyMessage_SomeGroup { + if m != nil { + return m.Somegroup + } + return nil +} + +func (m *MyMessage) GetRepBytes() [][]byte { + if m != nil { + return m.RepBytes + } + return nil +} + +func (m *MyMessage) GetBigfloat() float64 { + if m != nil && m.Bigfloat != nil { + return *m.Bigfloat + } + return 0 +} + +type MyMessage_SomeGroup struct { + GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MyMessage_SomeGroup) Reset() { *m = MyMessage_SomeGroup{} } +func (m *MyMessage_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*MyMessage_SomeGroup) ProtoMessage() {} +func (*MyMessage_SomeGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13, 0} } + +func (m *MyMessage_SomeGroup) GetGroupField() int32 { + if m != nil && m.GroupField != nil { + return *m.GroupField + } + return 0 +} + +type Ext struct { + Data *string `protobuf:"bytes,1,opt,name=data" json:"data,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Ext) Reset() { *m = Ext{} } +func (m *Ext) String() string { return proto.CompactTextString(m) } +func (*Ext) ProtoMessage() {} +func (*Ext) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +func (m *Ext) GetData() string { + if m != nil && m.Data != nil { + return *m.Data + } + return "" +} + +var E_Ext_More = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*Ext)(nil), + Field: 103, + Name: "testdata.Ext.more", + Tag: "bytes,103,opt,name=more", + Filename: "test.proto", +} + +var E_Ext_Text = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*string)(nil), + Field: 104, + Name: "testdata.Ext.text", + Tag: "bytes,104,opt,name=text", + 
Filename: "test.proto", +} + +var E_Ext_Number = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 105, + Name: "testdata.Ext.number", + Tag: "varint,105,opt,name=number", + Filename: "test.proto", +} + +type ComplexExtension struct { + First *int32 `protobuf:"varint,1,opt,name=first" json:"first,omitempty"` + Second *int32 `protobuf:"varint,2,opt,name=second" json:"second,omitempty"` + Third []int32 `protobuf:"varint,3,rep,name=third" json:"third,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ComplexExtension) Reset() { *m = ComplexExtension{} } +func (m *ComplexExtension) String() string { return proto.CompactTextString(m) } +func (*ComplexExtension) ProtoMessage() {} +func (*ComplexExtension) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +func (m *ComplexExtension) GetFirst() int32 { + if m != nil && m.First != nil { + return *m.First + } + return 0 +} + +func (m *ComplexExtension) GetSecond() int32 { + if m != nil && m.Second != nil { + return *m.Second + } + return 0 +} + +func (m *ComplexExtension) GetThird() []int32 { + if m != nil { + return m.Third + } + return nil +} + +type DefaultsMessage struct { + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DefaultsMessage) Reset() { *m = DefaultsMessage{} } +func (m *DefaultsMessage) String() string { return proto.CompactTextString(m) } +func (*DefaultsMessage) ProtoMessage() {} +func (*DefaultsMessage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +var extRange_DefaultsMessage = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*DefaultsMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_DefaultsMessage +} + +type MyMessageSet struct { + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MyMessageSet) Reset() { *m = MyMessageSet{} } +func (m *MyMessageSet) String() string { return proto.CompactTextString(m) } +func (*MyMessageSet) ProtoMessage() {} +func (*MyMessageSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +func (m *MyMessageSet) Marshal() ([]byte, error) { + return proto.MarshalMessageSet(&m.XXX_InternalExtensions) +} +func (m *MyMessageSet) Unmarshal(buf []byte) error { + return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) +} +func (m *MyMessageSet) MarshalJSON() ([]byte, error) { + return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) +} +func (m *MyMessageSet) UnmarshalJSON(buf []byte) error { + return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) +} + +// ensure MyMessageSet satisfies proto.Marshaler and proto.Unmarshaler +var _ proto.Marshaler = (*MyMessageSet)(nil) +var _ proto.Unmarshaler = (*MyMessageSet)(nil) + +var extRange_MyMessageSet = []proto.ExtensionRange{ + {100, 2147483646}, +} + +func (*MyMessageSet) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MyMessageSet +} + +type Empty struct { + XXX_unrecognized []byte `json:"-"` +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +type MessageList struct { + Message []*MessageList_Message `protobuf:"group,1,rep,name=Message,json=message" json:"message,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MessageList) Reset() { *m = MessageList{} } +func (m *MessageList) String() 
string { return proto.CompactTextString(m) } +func (*MessageList) ProtoMessage() {} +func (*MessageList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *MessageList) GetMessage() []*MessageList_Message { + if m != nil { + return m.Message + } + return nil +} + +type MessageList_Message struct { + Name *string `protobuf:"bytes,2,req,name=name" json:"name,omitempty"` + Count *int32 `protobuf:"varint,3,req,name=count" json:"count,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MessageList_Message) Reset() { *m = MessageList_Message{} } +func (m *MessageList_Message) String() string { return proto.CompactTextString(m) } +func (*MessageList_Message) ProtoMessage() {} +func (*MessageList_Message) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19, 0} } + +func (m *MessageList_Message) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MessageList_Message) GetCount() int32 { + if m != nil && m.Count != nil { + return *m.Count + } + return 0 +} + +type Strings struct { + StringField *string `protobuf:"bytes,1,opt,name=string_field,json=stringField" json:"string_field,omitempty"` + BytesField []byte `protobuf:"bytes,2,opt,name=bytes_field,json=bytesField" json:"bytes_field,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Strings) Reset() { *m = Strings{} } +func (m *Strings) String() string { return proto.CompactTextString(m) } +func (*Strings) ProtoMessage() {} +func (*Strings) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *Strings) GetStringField() string { + if m != nil && m.StringField != nil { + return *m.StringField + } + return "" +} + +func (m *Strings) GetBytesField() []byte { + if m != nil { + return m.BytesField + } + return nil +} + +type Defaults struct { + // Default-valued fields of all basic types. + // Same as GoTest, but copied here to make testing easier. + F_Bool *bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,def=1" json:"F_Bool,omitempty"` + F_Int32 *int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,def=32" json:"F_Int32,omitempty"` + F_Int64 *int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,def=64" json:"F_Int64,omitempty"` + F_Fixed32 *uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,def=320" json:"F_Fixed32,omitempty"` + F_Fixed64 *uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,def=640" json:"F_Fixed64,omitempty"` + F_Uint32 *uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,def=3200" json:"F_Uint32,omitempty"` + F_Uint64 *uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,def=6400" json:"F_Uint64,omitempty"` + F_Float *float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,def=314159" json:"F_Float,omitempty"` + F_Double *float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,def=271828" json:"F_Double,omitempty"` + F_String *string `protobuf:"bytes,10,opt,name=F_String,json=FString,def=hello, \"world!\"\n" json:"F_String,omitempty"` + F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,def=Bignose" json:"F_Bytes,omitempty"` + F_Sint32 *int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,def=-32" json:"F_Sint32,omitempty"` + F_Sint64 *int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,def=-64" json:"F_Sint64,omitempty"` + F_Enum *Defaults_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=testdata.Defaults_Color,def=1" json:"F_Enum,omitempty"` + // More fields with crazy defaults. 
+ F_Pinf *float32 `protobuf:"fixed32,15,opt,name=F_Pinf,json=FPinf,def=inf" json:"F_Pinf,omitempty"` + F_Ninf *float32 `protobuf:"fixed32,16,opt,name=F_Ninf,json=FNinf,def=-inf" json:"F_Ninf,omitempty"` + F_Nan *float32 `protobuf:"fixed32,17,opt,name=F_Nan,json=FNan,def=nan" json:"F_Nan,omitempty"` + // Sub-message. + Sub *SubDefaults `protobuf:"bytes,18,opt,name=sub" json:"sub,omitempty"` + // Redundant but explicit defaults. + StrZero *string `protobuf:"bytes,19,opt,name=str_zero,json=strZero,def=" json:"str_zero,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Defaults) Reset() { *m = Defaults{} } +func (m *Defaults) String() string { return proto.CompactTextString(m) } +func (*Defaults) ProtoMessage() {} +func (*Defaults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } + +const Default_Defaults_F_Bool bool = true +const Default_Defaults_F_Int32 int32 = 32 +const Default_Defaults_F_Int64 int64 = 64 +const Default_Defaults_F_Fixed32 uint32 = 320 +const Default_Defaults_F_Fixed64 uint64 = 640 +const Default_Defaults_F_Uint32 uint32 = 3200 +const Default_Defaults_F_Uint64 uint64 = 6400 +const Default_Defaults_F_Float float32 = 314159 +const Default_Defaults_F_Double float64 = 271828 +const Default_Defaults_F_String string = "hello, \"world!\"\n" + +var Default_Defaults_F_Bytes []byte = []byte("Bignose") + +const Default_Defaults_F_Sint32 int32 = -32 +const Default_Defaults_F_Sint64 int64 = -64 +const Default_Defaults_F_Enum Defaults_Color = Defaults_GREEN + +var Default_Defaults_F_Pinf float32 = float32(math.Inf(1)) +var Default_Defaults_F_Ninf float32 = float32(math.Inf(-1)) +var Default_Defaults_F_Nan float32 = float32(math.NaN()) + +func (m *Defaults) GetF_Bool() bool { + if m != nil && m.F_Bool != nil { + return *m.F_Bool + } + return Default_Defaults_F_Bool +} + +func (m *Defaults) GetF_Int32() int32 { + if m != nil && m.F_Int32 != nil { + return *m.F_Int32 + } + return Default_Defaults_F_Int32 +} + +func (m *Defaults) GetF_Int64() int64 { + if m != nil && m.F_Int64 != nil { + return *m.F_Int64 + } + return Default_Defaults_F_Int64 +} + +func (m *Defaults) GetF_Fixed32() uint32 { + if m != nil && m.F_Fixed32 != nil { + return *m.F_Fixed32 + } + return Default_Defaults_F_Fixed32 +} + +func (m *Defaults) GetF_Fixed64() uint64 { + if m != nil && m.F_Fixed64 != nil { + return *m.F_Fixed64 + } + return Default_Defaults_F_Fixed64 +} + +func (m *Defaults) GetF_Uint32() uint32 { + if m != nil && m.F_Uint32 != nil { + return *m.F_Uint32 + } + return Default_Defaults_F_Uint32 +} + +func (m *Defaults) GetF_Uint64() uint64 { + if m != nil && m.F_Uint64 != nil { + return *m.F_Uint64 + } + return Default_Defaults_F_Uint64 +} + +func (m *Defaults) GetF_Float() float32 { + if m != nil && m.F_Float != nil { + return *m.F_Float + } + return Default_Defaults_F_Float +} + +func (m *Defaults) GetF_Double() float64 { + if m != nil && m.F_Double != nil { + return *m.F_Double + } + return Default_Defaults_F_Double +} + +func (m *Defaults) GetF_String() string { + if m != nil && m.F_String != nil { + return *m.F_String + } + return Default_Defaults_F_String +} + +func (m *Defaults) GetF_Bytes() []byte { + if m != nil && m.F_Bytes != nil { + return m.F_Bytes + } + return append([]byte(nil), Default_Defaults_F_Bytes...) 
+} + +func (m *Defaults) GetF_Sint32() int32 { + if m != nil && m.F_Sint32 != nil { + return *m.F_Sint32 + } + return Default_Defaults_F_Sint32 +} + +func (m *Defaults) GetF_Sint64() int64 { + if m != nil && m.F_Sint64 != nil { + return *m.F_Sint64 + } + return Default_Defaults_F_Sint64 +} + +func (m *Defaults) GetF_Enum() Defaults_Color { + if m != nil && m.F_Enum != nil { + return *m.F_Enum + } + return Default_Defaults_F_Enum +} + +func (m *Defaults) GetF_Pinf() float32 { + if m != nil && m.F_Pinf != nil { + return *m.F_Pinf + } + return Default_Defaults_F_Pinf +} + +func (m *Defaults) GetF_Ninf() float32 { + if m != nil && m.F_Ninf != nil { + return *m.F_Ninf + } + return Default_Defaults_F_Ninf +} + +func (m *Defaults) GetF_Nan() float32 { + if m != nil && m.F_Nan != nil { + return *m.F_Nan + } + return Default_Defaults_F_Nan +} + +func (m *Defaults) GetSub() *SubDefaults { + if m != nil { + return m.Sub + } + return nil +} + +func (m *Defaults) GetStrZero() string { + if m != nil && m.StrZero != nil { + return *m.StrZero + } + return "" +} + +type SubDefaults struct { + N *int64 `protobuf:"varint,1,opt,name=n,def=7" json:"n,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SubDefaults) Reset() { *m = SubDefaults{} } +func (m *SubDefaults) String() string { return proto.CompactTextString(m) } +func (*SubDefaults) ProtoMessage() {} +func (*SubDefaults) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } + +const Default_SubDefaults_N int64 = 7 + +func (m *SubDefaults) GetN() int64 { + if m != nil && m.N != nil { + return *m.N + } + return Default_SubDefaults_N +} + +type RepeatedEnum struct { + Color []RepeatedEnum_Color `protobuf:"varint,1,rep,name=color,enum=testdata.RepeatedEnum_Color" json:"color,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *RepeatedEnum) Reset() { *m = RepeatedEnum{} } +func (m *RepeatedEnum) String() string { return proto.CompactTextString(m) } +func (*RepeatedEnum) ProtoMessage() {} +func (*RepeatedEnum) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } + +func (m *RepeatedEnum) GetColor() []RepeatedEnum_Color { + if m != nil { + return m.Color + } + return nil +} + +type MoreRepeated struct { + Bools []bool `protobuf:"varint,1,rep,name=bools" json:"bools,omitempty"` + BoolsPacked []bool `protobuf:"varint,2,rep,packed,name=bools_packed,json=boolsPacked" json:"bools_packed,omitempty"` + Ints []int32 `protobuf:"varint,3,rep,name=ints" json:"ints,omitempty"` + IntsPacked []int32 `protobuf:"varint,4,rep,packed,name=ints_packed,json=intsPacked" json:"ints_packed,omitempty"` + Int64SPacked []int64 `protobuf:"varint,7,rep,packed,name=int64s_packed,json=int64sPacked" json:"int64s_packed,omitempty"` + Strings []string `protobuf:"bytes,5,rep,name=strings" json:"strings,omitempty"` + Fixeds []uint32 `protobuf:"fixed32,6,rep,name=fixeds" json:"fixeds,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MoreRepeated) Reset() { *m = MoreRepeated{} } +func (m *MoreRepeated) String() string { return proto.CompactTextString(m) } +func (*MoreRepeated) ProtoMessage() {} +func (*MoreRepeated) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } + +func (m *MoreRepeated) GetBools() []bool { + if m != nil { + return m.Bools + } + return nil +} + +func (m *MoreRepeated) GetBoolsPacked() []bool { + if m != nil { + return m.BoolsPacked + } + return nil +} + +func (m *MoreRepeated) GetInts() []int32 { + if m != nil { + return m.Ints + } + return nil +} + +func (m *MoreRepeated) GetIntsPacked() 
[]int32 { + if m != nil { + return m.IntsPacked + } + return nil +} + +func (m *MoreRepeated) GetInt64SPacked() []int64 { + if m != nil { + return m.Int64SPacked + } + return nil +} + +func (m *MoreRepeated) GetStrings() []string { + if m != nil { + return m.Strings + } + return nil +} + +func (m *MoreRepeated) GetFixeds() []uint32 { + if m != nil { + return m.Fixeds + } + return nil +} + +type GroupOld struct { + G *GroupOld_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GroupOld) Reset() { *m = GroupOld{} } +func (m *GroupOld) String() string { return proto.CompactTextString(m) } +func (*GroupOld) ProtoMessage() {} +func (*GroupOld) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } + +func (m *GroupOld) GetG() *GroupOld_G { + if m != nil { + return m.G + } + return nil +} + +type GroupOld_G struct { + X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GroupOld_G) Reset() { *m = GroupOld_G{} } +func (m *GroupOld_G) String() string { return proto.CompactTextString(m) } +func (*GroupOld_G) ProtoMessage() {} +func (*GroupOld_G) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25, 0} } + +func (m *GroupOld_G) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +type GroupNew struct { + G *GroupNew_G `protobuf:"group,101,opt,name=G,json=g" json:"g,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GroupNew) Reset() { *m = GroupNew{} } +func (m *GroupNew) String() string { return proto.CompactTextString(m) } +func (*GroupNew) ProtoMessage() {} +func (*GroupNew) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } + +func (m *GroupNew) GetG() *GroupNew_G { + if m != nil { + return m.G + } + return nil +} + +type GroupNew_G struct { + X *int32 `protobuf:"varint,2,opt,name=x" json:"x,omitempty"` + Y *int32 `protobuf:"varint,3,opt,name=y" json:"y,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GroupNew_G) Reset() { *m = GroupNew_G{} } +func (m *GroupNew_G) String() string { return proto.CompactTextString(m) } +func (*GroupNew_G) ProtoMessage() {} +func (*GroupNew_G) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26, 0} } + +func (m *GroupNew_G) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +func (m *GroupNew_G) GetY() int32 { + if m != nil && m.Y != nil { + return *m.Y + } + return 0 +} + +type FloatingPoint struct { + F *float64 `protobuf:"fixed64,1,req,name=f" json:"f,omitempty"` + Exact *bool `protobuf:"varint,2,opt,name=exact" json:"exact,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FloatingPoint) Reset() { *m = FloatingPoint{} } +func (m *FloatingPoint) String() string { return proto.CompactTextString(m) } +func (*FloatingPoint) ProtoMessage() {} +func (*FloatingPoint) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } + +func (m *FloatingPoint) GetF() float64 { + if m != nil && m.F != nil { + return *m.F + } + return 0 +} + +func (m *FloatingPoint) GetExact() bool { + if m != nil && m.Exact != nil { + return *m.Exact + } + return false +} + +type MessageWithMap struct { + NameMapping map[int32]string `protobuf:"bytes,1,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + MsgMapping map[int64]*FloatingPoint `protobuf:"bytes,2,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" 
protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + ByteMapping map[bool][]byte `protobuf:"bytes,3,rep,name=byte_mapping,json=byteMapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + StrToStr map[string]string `protobuf:"bytes,4,rep,name=str_to_str,json=strToStr" json:"str_to_str,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MessageWithMap) Reset() { *m = MessageWithMap{} } +func (m *MessageWithMap) String() string { return proto.CompactTextString(m) } +func (*MessageWithMap) ProtoMessage() {} +func (*MessageWithMap) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } + +func (m *MessageWithMap) GetNameMapping() map[int32]string { + if m != nil { + return m.NameMapping + } + return nil +} + +func (m *MessageWithMap) GetMsgMapping() map[int64]*FloatingPoint { + if m != nil { + return m.MsgMapping + } + return nil +} + +func (m *MessageWithMap) GetByteMapping() map[bool][]byte { + if m != nil { + return m.ByteMapping + } + return nil +} + +func (m *MessageWithMap) GetStrToStr() map[string]string { + if m != nil { + return m.StrToStr + } + return nil +} + +type Oneof struct { + // Types that are valid to be assigned to Union: + // *Oneof_F_Bool + // *Oneof_F_Int32 + // *Oneof_F_Int64 + // *Oneof_F_Fixed32 + // *Oneof_F_Fixed64 + // *Oneof_F_Uint32 + // *Oneof_F_Uint64 + // *Oneof_F_Float + // *Oneof_F_Double + // *Oneof_F_String + // *Oneof_F_Bytes + // *Oneof_F_Sint32 + // *Oneof_F_Sint64 + // *Oneof_F_Enum + // *Oneof_F_Message + // *Oneof_FGroup + // *Oneof_F_Largest_Tag + Union isOneof_Union `protobuf_oneof:"union"` + // Types that are valid to be assigned to Tormato: + // *Oneof_Value + Tormato isOneof_Tormato `protobuf_oneof:"tormato"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Oneof) Reset() { *m = Oneof{} } +func (m *Oneof) String() string { return proto.CompactTextString(m) } +func (*Oneof) ProtoMessage() {} +func (*Oneof) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } + +type isOneof_Union interface { + isOneof_Union() +} +type isOneof_Tormato interface { + isOneof_Tormato() +} + +type Oneof_F_Bool struct { + F_Bool bool `protobuf:"varint,1,opt,name=F_Bool,json=FBool,oneof"` +} +type Oneof_F_Int32 struct { + F_Int32 int32 `protobuf:"varint,2,opt,name=F_Int32,json=FInt32,oneof"` +} +type Oneof_F_Int64 struct { + F_Int64 int64 `protobuf:"varint,3,opt,name=F_Int64,json=FInt64,oneof"` +} +type Oneof_F_Fixed32 struct { + F_Fixed32 uint32 `protobuf:"fixed32,4,opt,name=F_Fixed32,json=FFixed32,oneof"` +} +type Oneof_F_Fixed64 struct { + F_Fixed64 uint64 `protobuf:"fixed64,5,opt,name=F_Fixed64,json=FFixed64,oneof"` +} +type Oneof_F_Uint32 struct { + F_Uint32 uint32 `protobuf:"varint,6,opt,name=F_Uint32,json=FUint32,oneof"` +} +type Oneof_F_Uint64 struct { + F_Uint64 uint64 `protobuf:"varint,7,opt,name=F_Uint64,json=FUint64,oneof"` +} +type Oneof_F_Float struct { + F_Float float32 `protobuf:"fixed32,8,opt,name=F_Float,json=FFloat,oneof"` +} +type Oneof_F_Double struct { + F_Double float64 `protobuf:"fixed64,9,opt,name=F_Double,json=FDouble,oneof"` +} +type Oneof_F_String struct { + F_String string `protobuf:"bytes,10,opt,name=F_String,json=FString,oneof"` +} +type Oneof_F_Bytes struct { + F_Bytes []byte `protobuf:"bytes,11,opt,name=F_Bytes,json=FBytes,oneof"` +} +type Oneof_F_Sint32 struct { + F_Sint32 int32 `protobuf:"zigzag32,12,opt,name=F_Sint32,json=FSint32,oneof"` 
+} +type Oneof_F_Sint64 struct { + F_Sint64 int64 `protobuf:"zigzag64,13,opt,name=F_Sint64,json=FSint64,oneof"` +} +type Oneof_F_Enum struct { + F_Enum MyMessage_Color `protobuf:"varint,14,opt,name=F_Enum,json=FEnum,enum=testdata.MyMessage_Color,oneof"` +} +type Oneof_F_Message struct { + F_Message *GoTestField `protobuf:"bytes,15,opt,name=F_Message,json=FMessage,oneof"` +} +type Oneof_FGroup struct { + FGroup *Oneof_F_Group `protobuf:"group,16,opt,name=F_Group,json=fGroup,oneof"` +} +type Oneof_F_Largest_Tag struct { + F_Largest_Tag int32 `protobuf:"varint,536870911,opt,name=F_Largest_Tag,json=FLargestTag,oneof"` +} +type Oneof_Value struct { + Value int32 `protobuf:"varint,100,opt,name=value,oneof"` +} + +func (*Oneof_F_Bool) isOneof_Union() {} +func (*Oneof_F_Int32) isOneof_Union() {} +func (*Oneof_F_Int64) isOneof_Union() {} +func (*Oneof_F_Fixed32) isOneof_Union() {} +func (*Oneof_F_Fixed64) isOneof_Union() {} +func (*Oneof_F_Uint32) isOneof_Union() {} +func (*Oneof_F_Uint64) isOneof_Union() {} +func (*Oneof_F_Float) isOneof_Union() {} +func (*Oneof_F_Double) isOneof_Union() {} +func (*Oneof_F_String) isOneof_Union() {} +func (*Oneof_F_Bytes) isOneof_Union() {} +func (*Oneof_F_Sint32) isOneof_Union() {} +func (*Oneof_F_Sint64) isOneof_Union() {} +func (*Oneof_F_Enum) isOneof_Union() {} +func (*Oneof_F_Message) isOneof_Union() {} +func (*Oneof_FGroup) isOneof_Union() {} +func (*Oneof_F_Largest_Tag) isOneof_Union() {} +func (*Oneof_Value) isOneof_Tormato() {} + +func (m *Oneof) GetUnion() isOneof_Union { + if m != nil { + return m.Union + } + return nil +} +func (m *Oneof) GetTormato() isOneof_Tormato { + if m != nil { + return m.Tormato + } + return nil +} + +func (m *Oneof) GetF_Bool() bool { + if x, ok := m.GetUnion().(*Oneof_F_Bool); ok { + return x.F_Bool + } + return false +} + +func (m *Oneof) GetF_Int32() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Int32); ok { + return x.F_Int32 + } + return 0 +} + +func (m *Oneof) GetF_Int64() int64 { + if x, ok := m.GetUnion().(*Oneof_F_Int64); ok { + return x.F_Int64 + } + return 0 +} + +func (m *Oneof) GetF_Fixed32() uint32 { + if x, ok := m.GetUnion().(*Oneof_F_Fixed32); ok { + return x.F_Fixed32 + } + return 0 +} + +func (m *Oneof) GetF_Fixed64() uint64 { + if x, ok := m.GetUnion().(*Oneof_F_Fixed64); ok { + return x.F_Fixed64 + } + return 0 +} + +func (m *Oneof) GetF_Uint32() uint32 { + if x, ok := m.GetUnion().(*Oneof_F_Uint32); ok { + return x.F_Uint32 + } + return 0 +} + +func (m *Oneof) GetF_Uint64() uint64 { + if x, ok := m.GetUnion().(*Oneof_F_Uint64); ok { + return x.F_Uint64 + } + return 0 +} + +func (m *Oneof) GetF_Float() float32 { + if x, ok := m.GetUnion().(*Oneof_F_Float); ok { + return x.F_Float + } + return 0 +} + +func (m *Oneof) GetF_Double() float64 { + if x, ok := m.GetUnion().(*Oneof_F_Double); ok { + return x.F_Double + } + return 0 +} + +func (m *Oneof) GetF_String() string { + if x, ok := m.GetUnion().(*Oneof_F_String); ok { + return x.F_String + } + return "" +} + +func (m *Oneof) GetF_Bytes() []byte { + if x, ok := m.GetUnion().(*Oneof_F_Bytes); ok { + return x.F_Bytes + } + return nil +} + +func (m *Oneof) GetF_Sint32() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Sint32); ok { + return x.F_Sint32 + } + return 0 +} + +func (m *Oneof) GetF_Sint64() int64 { + if x, ok := m.GetUnion().(*Oneof_F_Sint64); ok { + return x.F_Sint64 + } + return 0 +} + +func (m *Oneof) GetF_Enum() MyMessage_Color { + if x, ok := m.GetUnion().(*Oneof_F_Enum); ok { + return x.F_Enum + } + return MyMessage_RED +} + +func (m *Oneof) 
GetF_Message() *GoTestField { + if x, ok := m.GetUnion().(*Oneof_F_Message); ok { + return x.F_Message + } + return nil +} + +func (m *Oneof) GetFGroup() *Oneof_F_Group { + if x, ok := m.GetUnion().(*Oneof_FGroup); ok { + return x.FGroup + } + return nil +} + +func (m *Oneof) GetF_Largest_Tag() int32 { + if x, ok := m.GetUnion().(*Oneof_F_Largest_Tag); ok { + return x.F_Largest_Tag + } + return 0 +} + +func (m *Oneof) GetValue() int32 { + if x, ok := m.GetTormato().(*Oneof_Value); ok { + return x.Value + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Oneof) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Oneof_OneofMarshaler, _Oneof_OneofUnmarshaler, _Oneof_OneofSizer, []interface{}{ + (*Oneof_F_Bool)(nil), + (*Oneof_F_Int32)(nil), + (*Oneof_F_Int64)(nil), + (*Oneof_F_Fixed32)(nil), + (*Oneof_F_Fixed64)(nil), + (*Oneof_F_Uint32)(nil), + (*Oneof_F_Uint64)(nil), + (*Oneof_F_Float)(nil), + (*Oneof_F_Double)(nil), + (*Oneof_F_String)(nil), + (*Oneof_F_Bytes)(nil), + (*Oneof_F_Sint32)(nil), + (*Oneof_F_Sint64)(nil), + (*Oneof_F_Enum)(nil), + (*Oneof_F_Message)(nil), + (*Oneof_FGroup)(nil), + (*Oneof_F_Largest_Tag)(nil), + (*Oneof_Value)(nil), + } +} + +func _Oneof_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Oneof) + // union + switch x := m.Union.(type) { + case *Oneof_F_Bool: + t := uint64(0) + if x.F_Bool { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Oneof_F_Int32: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Int32)) + case *Oneof_F_Int64: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Int64)) + case *Oneof_F_Fixed32: + b.EncodeVarint(4<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(x.F_Fixed32)) + case *Oneof_F_Fixed64: + b.EncodeVarint(5<<3 | proto.WireFixed64) + b.EncodeFixed64(uint64(x.F_Fixed64)) + case *Oneof_F_Uint32: + b.EncodeVarint(6<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Uint32)) + case *Oneof_F_Uint64: + b.EncodeVarint(7<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Uint64)) + case *Oneof_F_Float: + b.EncodeVarint(8<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(math.Float32bits(x.F_Float))) + case *Oneof_F_Double: + b.EncodeVarint(9<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.F_Double)) + case *Oneof_F_String: + b.EncodeVarint(10<<3 | proto.WireBytes) + b.EncodeStringBytes(x.F_String) + case *Oneof_F_Bytes: + b.EncodeVarint(11<<3 | proto.WireBytes) + b.EncodeRawBytes(x.F_Bytes) + case *Oneof_F_Sint32: + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeZigzag32(uint64(x.F_Sint32)) + case *Oneof_F_Sint64: + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeZigzag64(uint64(x.F_Sint64)) + case *Oneof_F_Enum: + b.EncodeVarint(14<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Enum)) + case *Oneof_F_Message: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.F_Message); err != nil { + return err + } + case *Oneof_FGroup: + b.EncodeVarint(16<<3 | proto.WireStartGroup) + if err := b.Marshal(x.FGroup); err != nil { + return err + } + b.EncodeVarint(16<<3 | proto.WireEndGroup) + case *Oneof_F_Largest_Tag: + b.EncodeVarint(536870911<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.F_Largest_Tag)) + case nil: + default: + return fmt.Errorf("Oneof.Union has unexpected type %T", x) + } + // tormato + switch x := m.Tormato.(type) 
{ + case *Oneof_Value: + b.EncodeVarint(100<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Value)) + case nil: + default: + return fmt.Errorf("Oneof.Tormato has unexpected type %T", x) + } + return nil +} + +func _Oneof_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Oneof) + switch tag { + case 1: // union.F_Bool + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Bool{x != 0} + return true, err + case 2: // union.F_Int32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Int32{int32(x)} + return true, err + case 3: // union.F_Int64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Int64{int64(x)} + return true, err + case 4: // union.F_Fixed32 + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Union = &Oneof_F_Fixed32{uint32(x)} + return true, err + case 5: // union.F_Fixed64 + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Oneof_F_Fixed64{x} + return true, err + case 6: // union.F_Uint32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Uint32{uint32(x)} + return true, err + case 7: // union.F_Uint64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Uint64{x} + return true, err + case 8: // union.F_Float + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Union = &Oneof_F_Float{math.Float32frombits(uint32(x))} + return true, err + case 9: // union.F_Double + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Oneof_F_Double{math.Float64frombits(x)} + return true, err + case 10: // union.F_String + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &Oneof_F_String{x} + return true, err + case 11: // union.F_Bytes + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Union = &Oneof_F_Bytes{x} + return true, err + case 12: // union.F_Sint32 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeZigzag32() + m.Union = &Oneof_F_Sint32{int32(x)} + return true, err + case 13: // union.F_Sint64 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeZigzag64() + m.Union = &Oneof_F_Sint64{int64(x)} + return true, err + case 14: // union.F_Enum + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Enum{MyMessage_Color(x)} + return true, err + case 15: // union.F_Message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GoTestField) + err := b.DecodeMessage(msg) + m.Union = &Oneof_F_Message{msg} + return true, err + case 16: // union.f_group + if wire != proto.WireStartGroup { + return true, proto.ErrInternalBadWireType + } + msg := new(Oneof_F_Group) + err := b.DecodeGroup(msg) + m.Union = &Oneof_FGroup{msg} + return true, err + case 536870911: // 
union.F_Largest_Tag + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Oneof_F_Largest_Tag{int32(x)} + return true, err + case 100: // tormato.value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Tormato = &Oneof_Value{int32(x)} + return true, err + default: + return false, nil + } +} + +func _Oneof_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Oneof) + // union + switch x := m.Union.(type) { + case *Oneof_F_Bool: + n += proto.SizeVarint(1<<3 | proto.WireVarint) + n += 1 + case *Oneof_F_Int32: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.F_Int32)) + case *Oneof_F_Int64: + n += proto.SizeVarint(3<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.F_Int64)) + case *Oneof_F_Fixed32: + n += proto.SizeVarint(4<<3 | proto.WireFixed32) + n += 4 + case *Oneof_F_Fixed64: + n += proto.SizeVarint(5<<3 | proto.WireFixed64) + n += 8 + case *Oneof_F_Uint32: + n += proto.SizeVarint(6<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.F_Uint32)) + case *Oneof_F_Uint64: + n += proto.SizeVarint(7<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.F_Uint64)) + case *Oneof_F_Float: + n += proto.SizeVarint(8<<3 | proto.WireFixed32) + n += 4 + case *Oneof_F_Double: + n += proto.SizeVarint(9<<3 | proto.WireFixed64) + n += 8 + case *Oneof_F_String: + n += proto.SizeVarint(10<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.F_String))) + n += len(x.F_String) + case *Oneof_F_Bytes: + n += proto.SizeVarint(11<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.F_Bytes))) + n += len(x.F_Bytes) + case *Oneof_F_Sint32: + n += proto.SizeVarint(12<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64((uint32(x.F_Sint32) << 1) ^ uint32((int32(x.F_Sint32) >> 31)))) + case *Oneof_F_Sint64: + n += proto.SizeVarint(13<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(uint64(x.F_Sint64<<1) ^ uint64((int64(x.F_Sint64) >> 63)))) + case *Oneof_F_Enum: + n += proto.SizeVarint(14<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.F_Enum)) + case *Oneof_F_Message: + s := proto.Size(x.F_Message) + n += proto.SizeVarint(15<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Oneof_FGroup: + n += proto.SizeVarint(16<<3 | proto.WireStartGroup) + n += proto.Size(x.FGroup) + n += proto.SizeVarint(16<<3 | proto.WireEndGroup) + case *Oneof_F_Largest_Tag: + n += proto.SizeVarint(536870911<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.F_Largest_Tag)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // tormato + switch x := m.Tormato.(type) { + case *Oneof_Value: + n += proto.SizeVarint(100<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Value)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Oneof_F_Group struct { + X *int32 `protobuf:"varint,17,opt,name=x" json:"x,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Oneof_F_Group) Reset() { *m = Oneof_F_Group{} } +func (m *Oneof_F_Group) String() string { return proto.CompactTextString(m) } +func (*Oneof_F_Group) ProtoMessage() {} +func (*Oneof_F_Group) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29, 0} } + +func (m *Oneof_F_Group) GetX() int32 { + if m != nil && m.X != nil { + return *m.X + } + return 0 +} + +type Communique struct { + MakeMeCry *bool 
`protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` + // This is a oneof, called "union". + // + // Types that are valid to be assigned to Union: + // *Communique_Number + // *Communique_Name + // *Communique_Data + // *Communique_TempC + // *Communique_Col + // *Communique_Msg + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique) Reset() { *m = Communique{} } +func (m *Communique) String() string { return proto.CompactTextString(m) } +func (*Communique) ProtoMessage() {} +func (*Communique) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } + +type isCommunique_Union interface { + isCommunique_Union() +} + +type Communique_Number struct { + Number int32 `protobuf:"varint,5,opt,name=number,oneof"` +} +type Communique_Name struct { + Name string `protobuf:"bytes,6,opt,name=name,oneof"` +} +type Communique_Data struct { + Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` +} +type Communique_TempC struct { + TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` +} +type Communique_Col struct { + Col MyMessage_Color `protobuf:"varint,9,opt,name=col,enum=testdata.MyMessage_Color,oneof"` +} +type Communique_Msg struct { + Msg *Strings `protobuf:"bytes,10,opt,name=msg,oneof"` +} + +func (*Communique_Number) isCommunique_Union() {} +func (*Communique_Name) isCommunique_Union() {} +func (*Communique_Data) isCommunique_Union() {} +func (*Communique_TempC) isCommunique_Union() {} +func (*Communique_Col) isCommunique_Union() {} +func (*Communique_Msg) isCommunique_Union() {} + +func (m *Communique) GetUnion() isCommunique_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *Communique) GetMakeMeCry() bool { + if m != nil && m.MakeMeCry != nil { + return *m.MakeMeCry + } + return false +} + +func (m *Communique) GetNumber() int32 { + if x, ok := m.GetUnion().(*Communique_Number); ok { + return x.Number + } + return 0 +} + +func (m *Communique) GetName() string { + if x, ok := m.GetUnion().(*Communique_Name); ok { + return x.Name + } + return "" +} + +func (m *Communique) GetData() []byte { + if x, ok := m.GetUnion().(*Communique_Data); ok { + return x.Data + } + return nil +} + +func (m *Communique) GetTempC() float64 { + if x, ok := m.GetUnion().(*Communique_TempC); ok { + return x.TempC + } + return 0 +} + +func (m *Communique) GetCol() MyMessage_Color { + if x, ok := m.GetUnion().(*Communique_Col); ok { + return x.Col + } + return MyMessage_RED +} + +func (m *Communique) GetMsg() *Strings { + if x, ok := m.GetUnion().(*Communique_Msg); ok { + return x.Msg + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ + (*Communique_Number)(nil), + (*Communique_Name)(nil), + (*Communique_Data)(nil), + (*Communique_TempC)(nil), + (*Communique_Col)(nil), + (*Communique_Msg)(nil), + } +} + +func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Number)) + case *Communique_Name: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case *Communique_Data: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Data) + case *Communique_TempC: + b.EncodeVarint(8<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.TempC)) + case *Communique_Col: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Col)) + case *Communique_Msg: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Msg); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Communique.Union has unexpected type %T", x) + } + return nil +} + +func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Communique) + switch tag { + case 5: // union.number + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Number{int32(x)} + return true, err + case 6: // union.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &Communique_Name{x} + return true, err + case 7: // union.data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Union = &Communique_Data{x} + return true, err + case 8: // union.temp_c + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Communique_TempC{math.Float64frombits(x)} + return true, err + case 9: // union.col + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Col{MyMessage_Color(x)} + return true, err + case 10: // union.msg + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Strings) + err := b.DecodeMessage(msg) + m.Union = &Communique_Msg{msg} + return true, err + default: + return false, nil + } +} + +func _Communique_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + n += proto.SizeVarint(5<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Number)) + case *Communique_Name: + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case *Communique_Data: + n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Data))) + n += len(x.Data) + case *Communique_TempC: + n += proto.SizeVarint(8<<3 | proto.WireFixed64) + n += 8 + case *Communique_Col: + n += proto.SizeVarint(9<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Col)) + case *Communique_Msg: + s := proto.Size(x.Msg) + n += proto.SizeVarint(10<<3 | 
proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +var E_Greeting = &proto.ExtensionDesc{ + ExtendedType: (*MyMessage)(nil), + ExtensionType: ([]string)(nil), + Field: 106, + Name: "testdata.greeting", + Tag: "bytes,106,rep,name=greeting", + Filename: "test.proto", +} + +var E_Complex = &proto.ExtensionDesc{ + ExtendedType: (*OtherMessage)(nil), + ExtensionType: (*ComplexExtension)(nil), + Field: 200, + Name: "testdata.complex", + Tag: "bytes,200,opt,name=complex", + Filename: "test.proto", +} + +var E_RComplex = &proto.ExtensionDesc{ + ExtendedType: (*OtherMessage)(nil), + ExtensionType: ([]*ComplexExtension)(nil), + Field: 201, + Name: "testdata.r_complex", + Tag: "bytes,201,rep,name=r_complex,json=rComplex", + Filename: "test.proto", +} + +var E_NoDefaultDouble = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float64)(nil), + Field: 101, + Name: "testdata.no_default_double", + Tag: "fixed64,101,opt,name=no_default_double,json=noDefaultDouble", + Filename: "test.proto", +} + +var E_NoDefaultFloat = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float32)(nil), + Field: 102, + Name: "testdata.no_default_float", + Tag: "fixed32,102,opt,name=no_default_float,json=noDefaultFloat", + Filename: "test.proto", +} + +var E_NoDefaultInt32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 103, + Name: "testdata.no_default_int32", + Tag: "varint,103,opt,name=no_default_int32,json=noDefaultInt32", + Filename: "test.proto", +} + +var E_NoDefaultInt64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 104, + Name: "testdata.no_default_int64", + Tag: "varint,104,opt,name=no_default_int64,json=noDefaultInt64", + Filename: "test.proto", +} + +var E_NoDefaultUint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 105, + Name: "testdata.no_default_uint32", + Tag: "varint,105,opt,name=no_default_uint32,json=noDefaultUint32", + Filename: "test.proto", +} + +var E_NoDefaultUint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 106, + Name: "testdata.no_default_uint64", + Tag: "varint,106,opt,name=no_default_uint64,json=noDefaultUint64", + Filename: "test.proto", +} + +var E_NoDefaultSint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 107, + Name: "testdata.no_default_sint32", + Tag: "zigzag32,107,opt,name=no_default_sint32,json=noDefaultSint32", + Filename: "test.proto", +} + +var E_NoDefaultSint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 108, + Name: "testdata.no_default_sint64", + Tag: "zigzag64,108,opt,name=no_default_sint64,json=noDefaultSint64", + Filename: "test.proto", +} + +var E_NoDefaultFixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 109, + Name: "testdata.no_default_fixed32", + Tag: "fixed32,109,opt,name=no_default_fixed32,json=noDefaultFixed32", + Filename: "test.proto", +} + +var E_NoDefaultFixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 110, + Name: "testdata.no_default_fixed64", + Tag: 
"fixed64,110,opt,name=no_default_fixed64,json=noDefaultFixed64", + Filename: "test.proto", +} + +var E_NoDefaultSfixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 111, + Name: "testdata.no_default_sfixed32", + Tag: "fixed32,111,opt,name=no_default_sfixed32,json=noDefaultSfixed32", + Filename: "test.proto", +} + +var E_NoDefaultSfixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 112, + Name: "testdata.no_default_sfixed64", + Tag: "fixed64,112,opt,name=no_default_sfixed64,json=noDefaultSfixed64", + Filename: "test.proto", +} + +var E_NoDefaultBool = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 113, + Name: "testdata.no_default_bool", + Tag: "varint,113,opt,name=no_default_bool,json=noDefaultBool", + Filename: "test.proto", +} + +var E_NoDefaultString = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*string)(nil), + Field: 114, + Name: "testdata.no_default_string", + Tag: "bytes,114,opt,name=no_default_string,json=noDefaultString", + Filename: "test.proto", +} + +var E_NoDefaultBytes = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 115, + Name: "testdata.no_default_bytes", + Tag: "bytes,115,opt,name=no_default_bytes,json=noDefaultBytes", + Filename: "test.proto", +} + +var E_NoDefaultEnum = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), + Field: 116, + Name: "testdata.no_default_enum", + Tag: "varint,116,opt,name=no_default_enum,json=noDefaultEnum,enum=testdata.DefaultsMessage_DefaultsEnum", + Filename: "test.proto", +} + +var E_DefaultDouble = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float64)(nil), + Field: 201, + Name: "testdata.default_double", + Tag: "fixed64,201,opt,name=default_double,json=defaultDouble,def=3.1415", + Filename: "test.proto", +} + +var E_DefaultFloat = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*float32)(nil), + Field: 202, + Name: "testdata.default_float", + Tag: "fixed32,202,opt,name=default_float,json=defaultFloat,def=3.14", + Filename: "test.proto", +} + +var E_DefaultInt32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 203, + Name: "testdata.default_int32", + Tag: "varint,203,opt,name=default_int32,json=defaultInt32,def=42", + Filename: "test.proto", +} + +var E_DefaultInt64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 204, + Name: "testdata.default_int64", + Tag: "varint,204,opt,name=default_int64,json=defaultInt64,def=43", + Filename: "test.proto", +} + +var E_DefaultUint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 205, + Name: "testdata.default_uint32", + Tag: "varint,205,opt,name=default_uint32,json=defaultUint32,def=44", + Filename: "test.proto", +} + +var E_DefaultUint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 206, + Name: "testdata.default_uint64", + Tag: "varint,206,opt,name=default_uint64,json=defaultUint64,def=45", + Filename: "test.proto", +} + +var E_DefaultSint32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 207, + Name: 
"testdata.default_sint32", + Tag: "zigzag32,207,opt,name=default_sint32,json=defaultSint32,def=46", + Filename: "test.proto", +} + +var E_DefaultSint64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 208, + Name: "testdata.default_sint64", + Tag: "zigzag64,208,opt,name=default_sint64,json=defaultSint64,def=47", + Filename: "test.proto", +} + +var E_DefaultFixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint32)(nil), + Field: 209, + Name: "testdata.default_fixed32", + Tag: "fixed32,209,opt,name=default_fixed32,json=defaultFixed32,def=48", + Filename: "test.proto", +} + +var E_DefaultFixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*uint64)(nil), + Field: 210, + Name: "testdata.default_fixed64", + Tag: "fixed64,210,opt,name=default_fixed64,json=defaultFixed64,def=49", + Filename: "test.proto", +} + +var E_DefaultSfixed32 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int32)(nil), + Field: 211, + Name: "testdata.default_sfixed32", + Tag: "fixed32,211,opt,name=default_sfixed32,json=defaultSfixed32,def=50", + Filename: "test.proto", +} + +var E_DefaultSfixed64 = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*int64)(nil), + Field: 212, + Name: "testdata.default_sfixed64", + Tag: "fixed64,212,opt,name=default_sfixed64,json=defaultSfixed64,def=51", + Filename: "test.proto", +} + +var E_DefaultBool = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*bool)(nil), + Field: 213, + Name: "testdata.default_bool", + Tag: "varint,213,opt,name=default_bool,json=defaultBool,def=1", + Filename: "test.proto", +} + +var E_DefaultString = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*string)(nil), + Field: 214, + Name: "testdata.default_string", + Tag: "bytes,214,opt,name=default_string,json=defaultString,def=Hello, string", + Filename: "test.proto", +} + +var E_DefaultBytes = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: ([]byte)(nil), + Field: 215, + Name: "testdata.default_bytes", + Tag: "bytes,215,opt,name=default_bytes,json=defaultBytes,def=Hello, bytes", + Filename: "test.proto", +} + +var E_DefaultEnum = &proto.ExtensionDesc{ + ExtendedType: (*DefaultsMessage)(nil), + ExtensionType: (*DefaultsMessage_DefaultsEnum)(nil), + Field: 216, + Name: "testdata.default_enum", + Tag: "varint,216,opt,name=default_enum,json=defaultEnum,enum=testdata.DefaultsMessage_DefaultsEnum,def=1", + Filename: "test.proto", +} + +var E_X201 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 201, + Name: "testdata.x201", + Tag: "bytes,201,opt,name=x201", + Filename: "test.proto", +} + +var E_X202 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 202, + Name: "testdata.x202", + Tag: "bytes,202,opt,name=x202", + Filename: "test.proto", +} + +var E_X203 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 203, + Name: "testdata.x203", + Tag: "bytes,203,opt,name=x203", + Filename: "test.proto", +} + +var E_X204 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 204, + Name: "testdata.x204", + Tag: "bytes,204,opt,name=x204", + Filename: "test.proto", +} + +var E_X205 = &proto.ExtensionDesc{ + ExtendedType: 
(*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 205, + Name: "testdata.x205", + Tag: "bytes,205,opt,name=x205", + Filename: "test.proto", +} + +var E_X206 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 206, + Name: "testdata.x206", + Tag: "bytes,206,opt,name=x206", + Filename: "test.proto", +} + +var E_X207 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 207, + Name: "testdata.x207", + Tag: "bytes,207,opt,name=x207", + Filename: "test.proto", +} + +var E_X208 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 208, + Name: "testdata.x208", + Tag: "bytes,208,opt,name=x208", + Filename: "test.proto", +} + +var E_X209 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 209, + Name: "testdata.x209", + Tag: "bytes,209,opt,name=x209", + Filename: "test.proto", +} + +var E_X210 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 210, + Name: "testdata.x210", + Tag: "bytes,210,opt,name=x210", + Filename: "test.proto", +} + +var E_X211 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 211, + Name: "testdata.x211", + Tag: "bytes,211,opt,name=x211", + Filename: "test.proto", +} + +var E_X212 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 212, + Name: "testdata.x212", + Tag: "bytes,212,opt,name=x212", + Filename: "test.proto", +} + +var E_X213 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 213, + Name: "testdata.x213", + Tag: "bytes,213,opt,name=x213", + Filename: "test.proto", +} + +var E_X214 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 214, + Name: "testdata.x214", + Tag: "bytes,214,opt,name=x214", + Filename: "test.proto", +} + +var E_X215 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 215, + Name: "testdata.x215", + Tag: "bytes,215,opt,name=x215", + Filename: "test.proto", +} + +var E_X216 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 216, + Name: "testdata.x216", + Tag: "bytes,216,opt,name=x216", + Filename: "test.proto", +} + +var E_X217 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 217, + Name: "testdata.x217", + Tag: "bytes,217,opt,name=x217", + Filename: "test.proto", +} + +var E_X218 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 218, + Name: "testdata.x218", + Tag: "bytes,218,opt,name=x218", + Filename: "test.proto", +} + +var E_X219 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 219, + Name: "testdata.x219", + Tag: "bytes,219,opt,name=x219", + Filename: "test.proto", +} + +var E_X220 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 220, + Name: "testdata.x220", + Tag: "bytes,220,opt,name=x220", + Filename: "test.proto", +} + +var E_X221 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 221, + Name: "testdata.x221", + Tag: "bytes,221,opt,name=x221", + Filename: "test.proto", +} + +var E_X222 = &proto.ExtensionDesc{ 
+ ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 222, + Name: "testdata.x222", + Tag: "bytes,222,opt,name=x222", + Filename: "test.proto", +} + +var E_X223 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 223, + Name: "testdata.x223", + Tag: "bytes,223,opt,name=x223", + Filename: "test.proto", +} + +var E_X224 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 224, + Name: "testdata.x224", + Tag: "bytes,224,opt,name=x224", + Filename: "test.proto", +} + +var E_X225 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 225, + Name: "testdata.x225", + Tag: "bytes,225,opt,name=x225", + Filename: "test.proto", +} + +var E_X226 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 226, + Name: "testdata.x226", + Tag: "bytes,226,opt,name=x226", + Filename: "test.proto", +} + +var E_X227 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 227, + Name: "testdata.x227", + Tag: "bytes,227,opt,name=x227", + Filename: "test.proto", +} + +var E_X228 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 228, + Name: "testdata.x228", + Tag: "bytes,228,opt,name=x228", + Filename: "test.proto", +} + +var E_X229 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 229, + Name: "testdata.x229", + Tag: "bytes,229,opt,name=x229", + Filename: "test.proto", +} + +var E_X230 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 230, + Name: "testdata.x230", + Tag: "bytes,230,opt,name=x230", + Filename: "test.proto", +} + +var E_X231 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 231, + Name: "testdata.x231", + Tag: "bytes,231,opt,name=x231", + Filename: "test.proto", +} + +var E_X232 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 232, + Name: "testdata.x232", + Tag: "bytes,232,opt,name=x232", + Filename: "test.proto", +} + +var E_X233 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 233, + Name: "testdata.x233", + Tag: "bytes,233,opt,name=x233", + Filename: "test.proto", +} + +var E_X234 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 234, + Name: "testdata.x234", + Tag: "bytes,234,opt,name=x234", + Filename: "test.proto", +} + +var E_X235 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 235, + Name: "testdata.x235", + Tag: "bytes,235,opt,name=x235", + Filename: "test.proto", +} + +var E_X236 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 236, + Name: "testdata.x236", + Tag: "bytes,236,opt,name=x236", + Filename: "test.proto", +} + +var E_X237 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 237, + Name: "testdata.x237", + Tag: "bytes,237,opt,name=x237", + Filename: "test.proto", +} + +var E_X238 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 238, + Name: "testdata.x238", + Tag: "bytes,238,opt,name=x238", + Filename: "test.proto", +} + +var E_X239 = 
&proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 239, + Name: "testdata.x239", + Tag: "bytes,239,opt,name=x239", + Filename: "test.proto", +} + +var E_X240 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 240, + Name: "testdata.x240", + Tag: "bytes,240,opt,name=x240", + Filename: "test.proto", +} + +var E_X241 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 241, + Name: "testdata.x241", + Tag: "bytes,241,opt,name=x241", + Filename: "test.proto", +} + +var E_X242 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 242, + Name: "testdata.x242", + Tag: "bytes,242,opt,name=x242", + Filename: "test.proto", +} + +var E_X243 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 243, + Name: "testdata.x243", + Tag: "bytes,243,opt,name=x243", + Filename: "test.proto", +} + +var E_X244 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 244, + Name: "testdata.x244", + Tag: "bytes,244,opt,name=x244", + Filename: "test.proto", +} + +var E_X245 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 245, + Name: "testdata.x245", + Tag: "bytes,245,opt,name=x245", + Filename: "test.proto", +} + +var E_X246 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 246, + Name: "testdata.x246", + Tag: "bytes,246,opt,name=x246", + Filename: "test.proto", +} + +var E_X247 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 247, + Name: "testdata.x247", + Tag: "bytes,247,opt,name=x247", + Filename: "test.proto", +} + +var E_X248 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 248, + Name: "testdata.x248", + Tag: "bytes,248,opt,name=x248", + Filename: "test.proto", +} + +var E_X249 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 249, + Name: "testdata.x249", + Tag: "bytes,249,opt,name=x249", + Filename: "test.proto", +} + +var E_X250 = &proto.ExtensionDesc{ + ExtendedType: (*MyMessageSet)(nil), + ExtensionType: (*Empty)(nil), + Field: 250, + Name: "testdata.x250", + Tag: "bytes,250,opt,name=x250", + Filename: "test.proto", +} + +func init() { + proto.RegisterType((*GoEnum)(nil), "testdata.GoEnum") + proto.RegisterType((*GoTestField)(nil), "testdata.GoTestField") + proto.RegisterType((*GoTest)(nil), "testdata.GoTest") + proto.RegisterType((*GoTest_RequiredGroup)(nil), "testdata.GoTest.RequiredGroup") + proto.RegisterType((*GoTest_RepeatedGroup)(nil), "testdata.GoTest.RepeatedGroup") + proto.RegisterType((*GoTest_OptionalGroup)(nil), "testdata.GoTest.OptionalGroup") + proto.RegisterType((*GoTestRequiredGroupField)(nil), "testdata.GoTestRequiredGroupField") + proto.RegisterType((*GoTestRequiredGroupField_Group)(nil), "testdata.GoTestRequiredGroupField.Group") + proto.RegisterType((*GoSkipTest)(nil), "testdata.GoSkipTest") + proto.RegisterType((*GoSkipTest_SkipGroup)(nil), "testdata.GoSkipTest.SkipGroup") + proto.RegisterType((*NonPackedTest)(nil), "testdata.NonPackedTest") + proto.RegisterType((*PackedTest)(nil), "testdata.PackedTest") + proto.RegisterType((*MaxTag)(nil), "testdata.MaxTag") + proto.RegisterType((*OldMessage)(nil), "testdata.OldMessage") + 
proto.RegisterType((*OldMessage_Nested)(nil), "testdata.OldMessage.Nested") + proto.RegisterType((*NewMessage)(nil), "testdata.NewMessage") + proto.RegisterType((*NewMessage_Nested)(nil), "testdata.NewMessage.Nested") + proto.RegisterType((*InnerMessage)(nil), "testdata.InnerMessage") + proto.RegisterType((*OtherMessage)(nil), "testdata.OtherMessage") + proto.RegisterType((*RequiredInnerMessage)(nil), "testdata.RequiredInnerMessage") + proto.RegisterType((*MyMessage)(nil), "testdata.MyMessage") + proto.RegisterType((*MyMessage_SomeGroup)(nil), "testdata.MyMessage.SomeGroup") + proto.RegisterType((*Ext)(nil), "testdata.Ext") + proto.RegisterType((*ComplexExtension)(nil), "testdata.ComplexExtension") + proto.RegisterType((*DefaultsMessage)(nil), "testdata.DefaultsMessage") + proto.RegisterType((*MyMessageSet)(nil), "testdata.MyMessageSet") + proto.RegisterType((*Empty)(nil), "testdata.Empty") + proto.RegisterType((*MessageList)(nil), "testdata.MessageList") + proto.RegisterType((*MessageList_Message)(nil), "testdata.MessageList.Message") + proto.RegisterType((*Strings)(nil), "testdata.Strings") + proto.RegisterType((*Defaults)(nil), "testdata.Defaults") + proto.RegisterType((*SubDefaults)(nil), "testdata.SubDefaults") + proto.RegisterType((*RepeatedEnum)(nil), "testdata.RepeatedEnum") + proto.RegisterType((*MoreRepeated)(nil), "testdata.MoreRepeated") + proto.RegisterType((*GroupOld)(nil), "testdata.GroupOld") + proto.RegisterType((*GroupOld_G)(nil), "testdata.GroupOld.G") + proto.RegisterType((*GroupNew)(nil), "testdata.GroupNew") + proto.RegisterType((*GroupNew_G)(nil), "testdata.GroupNew.G") + proto.RegisterType((*FloatingPoint)(nil), "testdata.FloatingPoint") + proto.RegisterType((*MessageWithMap)(nil), "testdata.MessageWithMap") + proto.RegisterType((*Oneof)(nil), "testdata.Oneof") + proto.RegisterType((*Oneof_F_Group)(nil), "testdata.Oneof.F_Group") + proto.RegisterType((*Communique)(nil), "testdata.Communique") + proto.RegisterEnum("testdata.FOO", FOO_name, FOO_value) + proto.RegisterEnum("testdata.GoTest_KIND", GoTest_KIND_name, GoTest_KIND_value) + proto.RegisterEnum("testdata.MyMessage_Color", MyMessage_Color_name, MyMessage_Color_value) + proto.RegisterEnum("testdata.DefaultsMessage_DefaultsEnum", DefaultsMessage_DefaultsEnum_name, DefaultsMessage_DefaultsEnum_value) + proto.RegisterEnum("testdata.Defaults_Color", Defaults_Color_name, Defaults_Color_value) + proto.RegisterEnum("testdata.RepeatedEnum_Color", RepeatedEnum_Color_name, RepeatedEnum_Color_value) + proto.RegisterExtension(E_Ext_More) + proto.RegisterExtension(E_Ext_Text) + proto.RegisterExtension(E_Ext_Number) + proto.RegisterExtension(E_Greeting) + proto.RegisterExtension(E_Complex) + proto.RegisterExtension(E_RComplex) + proto.RegisterExtension(E_NoDefaultDouble) + proto.RegisterExtension(E_NoDefaultFloat) + proto.RegisterExtension(E_NoDefaultInt32) + proto.RegisterExtension(E_NoDefaultInt64) + proto.RegisterExtension(E_NoDefaultUint32) + proto.RegisterExtension(E_NoDefaultUint64) + proto.RegisterExtension(E_NoDefaultSint32) + proto.RegisterExtension(E_NoDefaultSint64) + proto.RegisterExtension(E_NoDefaultFixed32) + proto.RegisterExtension(E_NoDefaultFixed64) + proto.RegisterExtension(E_NoDefaultSfixed32) + proto.RegisterExtension(E_NoDefaultSfixed64) + proto.RegisterExtension(E_NoDefaultBool) + proto.RegisterExtension(E_NoDefaultString) + proto.RegisterExtension(E_NoDefaultBytes) + proto.RegisterExtension(E_NoDefaultEnum) + proto.RegisterExtension(E_DefaultDouble) + proto.RegisterExtension(E_DefaultFloat) + 
proto.RegisterExtension(E_DefaultInt32) + proto.RegisterExtension(E_DefaultInt64) + proto.RegisterExtension(E_DefaultUint32) + proto.RegisterExtension(E_DefaultUint64) + proto.RegisterExtension(E_DefaultSint32) + proto.RegisterExtension(E_DefaultSint64) + proto.RegisterExtension(E_DefaultFixed32) + proto.RegisterExtension(E_DefaultFixed64) + proto.RegisterExtension(E_DefaultSfixed32) + proto.RegisterExtension(E_DefaultSfixed64) + proto.RegisterExtension(E_DefaultBool) + proto.RegisterExtension(E_DefaultString) + proto.RegisterExtension(E_DefaultBytes) + proto.RegisterExtension(E_DefaultEnum) + proto.RegisterExtension(E_X201) + proto.RegisterExtension(E_X202) + proto.RegisterExtension(E_X203) + proto.RegisterExtension(E_X204) + proto.RegisterExtension(E_X205) + proto.RegisterExtension(E_X206) + proto.RegisterExtension(E_X207) + proto.RegisterExtension(E_X208) + proto.RegisterExtension(E_X209) + proto.RegisterExtension(E_X210) + proto.RegisterExtension(E_X211) + proto.RegisterExtension(E_X212) + proto.RegisterExtension(E_X213) + proto.RegisterExtension(E_X214) + proto.RegisterExtension(E_X215) + proto.RegisterExtension(E_X216) + proto.RegisterExtension(E_X217) + proto.RegisterExtension(E_X218) + proto.RegisterExtension(E_X219) + proto.RegisterExtension(E_X220) + proto.RegisterExtension(E_X221) + proto.RegisterExtension(E_X222) + proto.RegisterExtension(E_X223) + proto.RegisterExtension(E_X224) + proto.RegisterExtension(E_X225) + proto.RegisterExtension(E_X226) + proto.RegisterExtension(E_X227) + proto.RegisterExtension(E_X228) + proto.RegisterExtension(E_X229) + proto.RegisterExtension(E_X230) + proto.RegisterExtension(E_X231) + proto.RegisterExtension(E_X232) + proto.RegisterExtension(E_X233) + proto.RegisterExtension(E_X234) + proto.RegisterExtension(E_X235) + proto.RegisterExtension(E_X236) + proto.RegisterExtension(E_X237) + proto.RegisterExtension(E_X238) + proto.RegisterExtension(E_X239) + proto.RegisterExtension(E_X240) + proto.RegisterExtension(E_X241) + proto.RegisterExtension(E_X242) + proto.RegisterExtension(E_X243) + proto.RegisterExtension(E_X244) + proto.RegisterExtension(E_X245) + proto.RegisterExtension(E_X246) + proto.RegisterExtension(E_X247) + proto.RegisterExtension(E_X248) + proto.RegisterExtension(E_X249) + proto.RegisterExtension(E_X250) +} + +func init() { proto.RegisterFile("test.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 4453 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x5a, 0xc9, 0x77, 0xdb, 0x48, + 0x7a, 0x37, 0xc0, 0xfd, 0x23, 0x25, 0x42, 0x65, 0xb5, 0x9b, 0x96, 0xbc, 0xc0, 0x9c, 0xe9, 0x6e, + 0x7a, 0xd3, 0x48, 0x20, 0x44, 0xdb, 0x74, 0xa7, 0xdf, 0xf3, 0x42, 0xca, 0x7a, 0x63, 0x89, 0x0a, + 0xa4, 0xee, 0x7e, 0xd3, 0x39, 0xf0, 0x51, 0x22, 0x44, 0xb3, 0x4d, 0x02, 0x34, 0x09, 0xc5, 0x52, + 0x72, 0xe9, 0x4b, 0x72, 0xcd, 0x76, 0xc9, 0x35, 0xa7, 0x9c, 0x92, 0xbc, 0x97, 0x7f, 0x22, 0xe9, + 0xee, 0x59, 0x7b, 0xd6, 0xac, 0x93, 0x7d, 0x99, 0xec, 0xdb, 0x4c, 0x92, 0x4b, 0xcf, 0xab, 0xaf, + 0x0a, 0x40, 0x01, 0x24, 0x20, 0xf9, 0x24, 0x56, 0xd5, 0xef, 0xf7, 0xd5, 0xf6, 0xab, 0xef, 0xab, + 0xaf, 0x20, 0x00, 0xc7, 0x9c, 0x38, 0x2b, 0xa3, 0xb1, 0xed, 0xd8, 0x24, 0x4b, 0x7f, 0x77, 0x3b, + 0x4e, 0xa7, 0x7c, 0x1d, 0xd2, 0x1b, 0x76, 0xc3, 0x3a, 0x1a, 0x92, 0xab, 0x90, 0x38, 0xb4, 0xed, + 0x92, 0xa4, 0xca, 0x95, 0x79, 0x6d, 0x6e, 0xc5, 0x45, 0xac, 0x34, 0x5b, 0x2d, 0x83, 0xb6, 0x94, + 0xef, 0x40, 0x7e, 0xc3, 0xde, 0x33, 0x27, 0x4e, 0xb3, 0x6f, 0x0e, 0xba, 0x64, 0x11, 0x52, 0x4f, + 0x3b, 0xfb, 0xe6, 
0x00, 0x19, 0x39, 0x83, 0x15, 0x08, 0x81, 0xe4, 0xde, 0xc9, 0xc8, 0x2c, 0xc9, + 0x58, 0x89, 0xbf, 0xcb, 0xbf, 0x72, 0x85, 0x76, 0x42, 0x99, 0xe4, 0x3a, 0x24, 0xbf, 0xdc, 0xb7, + 0xba, 0xbc, 0x97, 0xd7, 0xfc, 0x5e, 0x58, 0xfb, 0xca, 0x97, 0x37, 0xb7, 0x1f, 0x1b, 0x08, 0xa1, + 0xf6, 0xf7, 0x3a, 0xfb, 0x03, 0x6a, 0x4a, 0xa2, 0xf6, 0xb1, 0x40, 0x6b, 0x77, 0x3a, 0xe3, 0xce, + 0xb0, 0x94, 0x50, 0xa5, 0x4a, 0xca, 0x60, 0x05, 0x72, 0x1f, 0xe6, 0x0c, 0xf3, 0xc5, 0x51, 0x7f, + 0x6c, 0x76, 0x71, 0x70, 0xa5, 0xa4, 0x2a, 0x57, 0xf2, 0xd3, 0xf6, 0xb1, 0xd1, 0x08, 0x62, 0x19, + 0x79, 0x64, 0x76, 0x1c, 0x97, 0x9c, 0x52, 0x13, 0xb1, 0x64, 0x01, 0x4b, 0xc9, 0xad, 0x91, 0xd3, + 0xb7, 0xad, 0xce, 0x80, 0x91, 0xd3, 0xaa, 0x14, 0x43, 0x0e, 0x60, 0xc9, 0x9b, 0x50, 0x6c, 0xb6, + 0x1f, 0xda, 0xf6, 0xa0, 0x3d, 0xe6, 0x23, 0x2a, 0x81, 0x2a, 0x57, 0xb2, 0xc6, 0x5c, 0x93, 0xd6, + 0xba, 0xc3, 0x24, 0x15, 0x50, 0x9a, 0xed, 0x4d, 0xcb, 0xa9, 0x6a, 0x3e, 0x30, 0xaf, 0xca, 0x95, + 0x94, 0x31, 0xdf, 0xc4, 0xea, 0x29, 0x64, 0x4d, 0xf7, 0x91, 0x05, 0x55, 0xae, 0x24, 0x18, 0xb2, + 0xa6, 0x7b, 0xc8, 0x5b, 0x40, 0x9a, 0xed, 0x66, 0xff, 0xd8, 0xec, 0x8a, 0x56, 0xe7, 0x54, 0xb9, + 0x92, 0x31, 0x94, 0x26, 0x6f, 0x98, 0x81, 0x16, 0x2d, 0xcf, 0xab, 0x72, 0x25, 0xed, 0xa2, 0x05, + 0xdb, 0x37, 0x60, 0xa1, 0xd9, 0x7e, 0xb7, 0x1f, 0x1c, 0x70, 0x51, 0x95, 0x2b, 0x73, 0x46, 0xb1, + 0xc9, 0xea, 0xa7, 0xb1, 0xa2, 0x61, 0x45, 0x95, 0x2b, 0x49, 0x8e, 0x15, 0xec, 0xe2, 0xec, 0x9a, + 0x03, 0xbb, 0xe3, 0xf8, 0xd0, 0x05, 0x55, 0xae, 0xc8, 0xc6, 0x7c, 0x13, 0xab, 0x83, 0x56, 0x1f, + 0xdb, 0x47, 0xfb, 0x03, 0xd3, 0x87, 0x12, 0x55, 0xae, 0x48, 0x46, 0xb1, 0xc9, 0xea, 0x83, 0xd8, + 0x5d, 0x67, 0xdc, 0xb7, 0x7a, 0x3e, 0xf6, 0x3c, 0xea, 0xb7, 0xd8, 0x64, 0xf5, 0xc1, 0x11, 0x3c, + 0x3c, 0x71, 0xcc, 0x89, 0x0f, 0x35, 0x55, 0xb9, 0x52, 0x30, 0xe6, 0x9b, 0x58, 0x1d, 0xb2, 0x1a, + 0x5a, 0x83, 0x43, 0x55, 0xae, 0x2c, 0x50, 0xab, 0x33, 0xd6, 0x60, 0x37, 0xb4, 0x06, 0x3d, 0x55, + 0xae, 0x10, 0x8e, 0x15, 0xd6, 0x40, 0xd4, 0x0c, 0x13, 0x62, 0x69, 0x51, 0x4d, 0x08, 0x9a, 0x61, + 0x95, 0x41, 0xcd, 0x70, 0xe0, 0x6b, 0x6a, 0x42, 0xd4, 0x4c, 0x08, 0x89, 0x9d, 0x73, 0xe4, 0x05, + 0x35, 0x21, 0x6a, 0x86, 0x23, 0x43, 0x9a, 0xe1, 0xd8, 0xd7, 0xd5, 0x44, 0x50, 0x33, 0x53, 0x68, + 0xd1, 0x72, 0x49, 0x4d, 0x04, 0x35, 0xc3, 0xd1, 0x41, 0xcd, 0x70, 0xf0, 0x45, 0x35, 0x11, 0xd0, + 0x4c, 0x18, 0x2b, 0x1a, 0x5e, 0x52, 0x13, 0x01, 0xcd, 0x88, 0xb3, 0x73, 0x35, 0xc3, 0xa1, 0xcb, + 0x6a, 0x42, 0xd4, 0x8c, 0x68, 0xd5, 0xd3, 0x0c, 0x87, 0x5e, 0x52, 0x13, 0x01, 0xcd, 0x88, 0x58, + 0x4f, 0x33, 0x1c, 0x7b, 0x59, 0x4d, 0x04, 0x34, 0xc3, 0xb1, 0xd7, 0x45, 0xcd, 0x70, 0xe8, 0xc7, + 0x92, 0x9a, 0x10, 0x45, 0xc3, 0xa1, 0x37, 0x03, 0xa2, 0xe1, 0xd8, 0x4f, 0x28, 0x56, 0x54, 0x4d, + 0x18, 0x2c, 0xae, 0xc2, 0xa7, 0x14, 0x2c, 0xca, 0x86, 0x83, 0x7d, 0xd9, 0xd8, 0xdc, 0x05, 0x95, + 0xae, 0xa8, 0x92, 0x27, 0x1b, 0xd7, 0x2f, 0x89, 0xb2, 0xf1, 0x80, 0x57, 0xd1, 0xd5, 0x72, 0xd9, + 0x4c, 0x21, 0x6b, 0xba, 0x8f, 0x54, 0x55, 0xc9, 0x97, 0x8d, 0x87, 0x0c, 0xc8, 0xc6, 0xc3, 0x5e, + 0x53, 0x25, 0x51, 0x36, 0x33, 0xd0, 0xa2, 0xe5, 0xb2, 0x2a, 0x89, 0xb2, 0xf1, 0xd0, 0xa2, 0x6c, + 0x3c, 0xf0, 0x17, 0x54, 0x49, 0x90, 0xcd, 0x34, 0x56, 0x34, 0xfc, 0x45, 0x55, 0x12, 0x64, 0x13, + 0x9c, 0x1d, 0x93, 0x8d, 0x07, 0x7d, 0x43, 0x95, 0x7c, 0xd9, 0x04, 0xad, 0x72, 0xd9, 0x78, 0xd0, + 0x37, 0x55, 0x49, 0x90, 0x4d, 0x10, 0xcb, 0x65, 0xe3, 0x61, 0xdf, 0xc2, 0xf8, 0xe6, 0xca, 0xc6, + 0xc3, 0x0a, 0xb2, 0xf1, 0xa0, 0xbf, 0x43, 0x63, 0xa1, 0x27, 0x1b, 0x0f, 0x2a, 0xca, 0xc6, 0xc3, + 0xfe, 0x2e, 0xc5, 0xfa, 0xb2, 0x99, 0x06, 
0x8b, 0xab, 0xf0, 0x7b, 0x14, 0xec, 0xcb, 0xc6, 0x03, + 0xaf, 0xe0, 0x20, 0xa8, 0x6c, 0xba, 0xe6, 0x61, 0xe7, 0x68, 0x40, 0x25, 0x56, 0xa1, 0xba, 0xa9, + 0x27, 0x9d, 0xf1, 0x91, 0x49, 0x47, 0x62, 0xdb, 0x83, 0xc7, 0x6e, 0x1b, 0x59, 0xa1, 0xc6, 0x99, + 0x7c, 0x7c, 0xc2, 0x75, 0xaa, 0x9f, 0xba, 0x5c, 0xd5, 0x8c, 0x22, 0xd3, 0xd0, 0x34, 0xbe, 0xa6, + 0x0b, 0xf8, 0x1b, 0x54, 0x45, 0x75, 0xb9, 0xa6, 0x33, 0x7c, 0x4d, 0xf7, 0xf1, 0x55, 0x38, 0xef, + 0x4b, 0xc9, 0x67, 0xdc, 0xa4, 0x5a, 0xaa, 0x27, 0xaa, 0xda, 0xaa, 0xb1, 0xe0, 0x0a, 0x6a, 0x16, + 0x29, 0xd0, 0xcd, 0x2d, 0x2a, 0xa9, 0x7a, 0xa2, 0xa6, 0x7b, 0x24, 0xb1, 0x27, 0x8d, 0xca, 0x90, + 0x0b, 0xcb, 0xe7, 0xdc, 0xa6, 0xca, 0xaa, 0x27, 0xab, 0xda, 0xea, 0xaa, 0xa1, 0x70, 0x7d, 0xcd, + 0xe0, 0x04, 0xfa, 0x59, 0xa1, 0x0a, 0xab, 0x27, 0x6b, 0xba, 0xc7, 0x09, 0xf6, 0xb3, 0xe0, 0x0a, + 0xcd, 0xa7, 0x7c, 0x89, 0x2a, 0xad, 0x9e, 0xae, 0xae, 0xe9, 0x6b, 0xeb, 0xf7, 0x8c, 0x22, 0x53, + 0x9c, 0xcf, 0xd1, 0x69, 0x3f, 0x5c, 0x72, 0x3e, 0x69, 0x95, 0x6a, 0xae, 0x9e, 0xd6, 0xee, 0xac, + 0xdd, 0xd5, 0xee, 0x1a, 0x0a, 0xd7, 0x9e, 0xcf, 0x7a, 0x87, 0xb2, 0xb8, 0xf8, 0x7c, 0xd6, 0x1a, + 0x55, 0x5f, 0x5d, 0x79, 0x66, 0x0e, 0x06, 0xf6, 0x2d, 0xb5, 0xfc, 0xd2, 0x1e, 0x0f, 0xba, 0xd7, + 0xca, 0x60, 0x28, 0x5c, 0x8f, 0x62, 0xaf, 0x0b, 0xae, 0x20, 0x7d, 0xfa, 0xaf, 0xd1, 0x7b, 0x58, + 0xa1, 0x9e, 0x79, 0xd8, 0xef, 0x59, 0xf6, 0xc4, 0x34, 0x8a, 0x4c, 0x9a, 0xa1, 0x35, 0xd9, 0x0d, + 0xaf, 0xe3, 0xaf, 0x53, 0xda, 0x42, 0x3d, 0x71, 0xbb, 0xaa, 0xd1, 0x9e, 0x66, 0xad, 0xe3, 0x6e, + 0x78, 0x1d, 0x7f, 0x83, 0x72, 0x48, 0x3d, 0x71, 0xbb, 0xa6, 0x73, 0x8e, 0xb8, 0x8e, 0x77, 0xe0, + 0x42, 0x28, 0x2e, 0xb6, 0x47, 0x9d, 0x83, 0xe7, 0x66, 0xb7, 0xa4, 0xd1, 0xf0, 0xf8, 0x50, 0x56, + 0x24, 0xe3, 0x7c, 0x20, 0x44, 0xee, 0x60, 0x33, 0xb9, 0x07, 0xaf, 0x87, 0x03, 0xa5, 0xcb, 0xac, + 0xd2, 0x78, 0x89, 0xcc, 0xc5, 0x60, 0xcc, 0x0c, 0x51, 0x05, 0x07, 0xec, 0x52, 0x75, 0x1a, 0x40, + 0x7d, 0xaa, 0xef, 0x89, 0x39, 0xf5, 0x67, 0xe0, 0xe2, 0x74, 0x28, 0x75, 0xc9, 0xeb, 0x34, 0xa2, + 0x22, 0xf9, 0x42, 0x38, 0xaa, 0x4e, 0xd1, 0x67, 0xf4, 0x5d, 0xa3, 0x21, 0x56, 0xa4, 0x4f, 0xf5, + 0x7e, 0x1f, 0x4a, 0x53, 0xc1, 0xd6, 0x65, 0xdf, 0xa1, 0x31, 0x17, 0xd9, 0xaf, 0x85, 0xe2, 0x6e, + 0x98, 0x3c, 0xa3, 0xeb, 0xbb, 0x34, 0x08, 0x0b, 0xe4, 0xa9, 0x9e, 0x71, 0xc9, 0x82, 0xe1, 0xd8, + 0xe5, 0xde, 0xa3, 0x51, 0x99, 0x2f, 0x59, 0x20, 0x32, 0x8b, 0xfd, 0x86, 0xe2, 0xb3, 0xcb, 0xad, + 0xd3, 0x30, 0xcd, 0xfb, 0x0d, 0x86, 0x6a, 0x4e, 0x7e, 0x9b, 0x92, 0x77, 0x67, 0xcf, 0xf8, 0xc7, + 0x09, 0x1a, 0x60, 0x39, 0x7b, 0x77, 0xd6, 0x94, 0x3d, 0xf6, 0x8c, 0x29, 0xff, 0x84, 0xb2, 0x89, + 0xc0, 0x9e, 0x9a, 0xf3, 0x63, 0x98, 0x73, 0x6f, 0x75, 0xbd, 0xb1, 0x7d, 0x34, 0x2a, 0x35, 0x55, + 0xb9, 0x02, 0xda, 0x95, 0xa9, 0xec, 0xc7, 0xbd, 0xe4, 0x6d, 0x50, 0x94, 0x11, 0x24, 0x31, 0x2b, + 0xcc, 0x2e, 0xb3, 0xb2, 0xa3, 0x26, 0x22, 0xac, 0x30, 0x94, 0x67, 0x45, 0x20, 0x51, 0x2b, 0xae, + 0xd3, 0x67, 0x56, 0x3e, 0x50, 0xa5, 0x99, 0x56, 0xdc, 0x10, 0xc0, 0xad, 0x04, 0x48, 0x4b, 0xeb, + 0x7e, 0xbe, 0x85, 0xed, 0xe4, 0x8b, 0xe1, 0x04, 0x6c, 0x03, 0xef, 0xcf, 0xc1, 0x4a, 0x46, 0x13, + 0x06, 0x37, 0x4d, 0xfb, 0xd9, 0x08, 0x5a, 0x60, 0x34, 0xd3, 0xb4, 0x9f, 0x9b, 0x41, 0x2b, 0xff, + 0xa6, 0x04, 0x49, 0x9a, 0x4f, 0x92, 0x2c, 0x24, 0xdf, 0x6b, 0x6d, 0x3e, 0x56, 0xce, 0xd1, 0x5f, + 0x0f, 0x5b, 0xad, 0xa7, 0x8a, 0x44, 0x72, 0x90, 0x7a, 0xf8, 0x95, 0xbd, 0xc6, 0xae, 0x22, 0x93, + 0x22, 0xe4, 0x9b, 0x9b, 0xdb, 0x1b, 0x0d, 0x63, 0xc7, 0xd8, 0xdc, 0xde, 0x53, 0x12, 0xb4, 0xad, + 0xf9, 0xb4, 0xf5, 0x60, 0x4f, 0x49, 0x92, 0x0c, 0x24, 0x68, 0x5d, 
0x8a, 0x00, 0xa4, 0x77, 0xf7, + 0x8c, 0xcd, 0xed, 0x0d, 0x25, 0x4d, 0xad, 0xec, 0x6d, 0x6e, 0x35, 0x94, 0x0c, 0x45, 0xee, 0xbd, + 0xbb, 0xf3, 0xb4, 0xa1, 0x64, 0xe9, 0xcf, 0x07, 0x86, 0xf1, 0xe0, 0x2b, 0x4a, 0x8e, 0x92, 0xb6, + 0x1e, 0xec, 0x28, 0x80, 0xcd, 0x0f, 0x1e, 0x3e, 0x6d, 0x28, 0x79, 0x52, 0x80, 0x6c, 0xf3, 0xdd, + 0xed, 0x47, 0x7b, 0x9b, 0xad, 0x6d, 0xa5, 0x50, 0x3e, 0x81, 0x12, 0x5b, 0xe6, 0xc0, 0x2a, 0xb2, + 0xa4, 0xf0, 0x1d, 0x48, 0xb1, 0x9d, 0x91, 0x50, 0x25, 0x95, 0xf0, 0xce, 0x4c, 0x53, 0x56, 0xd8, + 0x1e, 0x31, 0xda, 0xd2, 0x65, 0x48, 0xb1, 0x55, 0x5a, 0x84, 0x14, 0x5b, 0x1d, 0x19, 0x53, 0x45, + 0x56, 0x28, 0xff, 0x96, 0x0c, 0xb0, 0x61, 0xef, 0x3e, 0xef, 0x8f, 0x30, 0x21, 0xbf, 0x0c, 0x30, + 0x79, 0xde, 0x1f, 0xb5, 0x51, 0xf5, 0x3c, 0xa9, 0xcc, 0xd1, 0x1a, 0xf4, 0x77, 0xe4, 0x1a, 0x14, + 0xb0, 0xf9, 0x90, 0x79, 0x21, 0xcc, 0x25, 0x33, 0x46, 0x9e, 0xd6, 0x71, 0xc7, 0x14, 0x84, 0xd4, + 0x74, 0x4c, 0x21, 0xd3, 0x02, 0xa4, 0xa6, 0x93, 0xab, 0x80, 0xc5, 0xf6, 0x04, 0x23, 0x0a, 0xa6, + 0x8d, 0x39, 0x03, 0xfb, 0x65, 0x31, 0x86, 0xbc, 0x0d, 0xd8, 0x27, 0x9b, 0x77, 0x71, 0xfa, 0x74, + 0xb8, 0xc3, 0x5d, 0xa1, 0x3f, 0xd8, 0x6c, 0x7d, 0xc2, 0x52, 0x0b, 0x72, 0x5e, 0x3d, 0xed, 0x0b, + 0x6b, 0xf9, 0x8c, 0x14, 0x9c, 0x11, 0x60, 0x95, 0x37, 0x25, 0x06, 0xe0, 0xa3, 0x59, 0xc0, 0xd1, + 0x30, 0x12, 0x1b, 0x4e, 0xf9, 0x32, 0xcc, 0x6d, 0xdb, 0x16, 0x3b, 0xbd, 0xb8, 0x4a, 0x05, 0x90, + 0x3a, 0x25, 0x09, 0xb3, 0x27, 0xa9, 0x53, 0xbe, 0x02, 0x20, 0xb4, 0x29, 0x20, 0xed, 0xb3, 0x36, + 0xf4, 0x01, 0xd2, 0x7e, 0xf9, 0x26, 0xa4, 0xb7, 0x3a, 0xc7, 0x7b, 0x9d, 0x1e, 0xb9, 0x06, 0x30, + 0xe8, 0x4c, 0x9c, 0xf6, 0x21, 0xee, 0xc3, 0xe7, 0x9f, 0x7f, 0xfe, 0xb9, 0x84, 0x97, 0xbd, 0x1c, + 0xad, 0x65, 0xfb, 0xf1, 0x02, 0xa0, 0x35, 0xe8, 0x6e, 0x99, 0x93, 0x49, 0xa7, 0x67, 0x92, 0x2a, + 0xa4, 0x2d, 0x73, 0x42, 0xa3, 0x9d, 0x84, 0xef, 0x08, 0xcb, 0xfe, 0x2a, 0xf8, 0xa8, 0x95, 0x6d, + 0x84, 0x18, 0x1c, 0x4a, 0x14, 0x48, 0x58, 0x47, 0x43, 0x7c, 0x27, 0x49, 0x19, 0xf4, 0xe7, 0xd2, + 0x25, 0x48, 0x33, 0x0c, 0x21, 0x90, 0xb4, 0x3a, 0x43, 0xb3, 0xc4, 0xfa, 0xc5, 0xdf, 0xe5, 0x5f, + 0x95, 0x00, 0xb6, 0xcd, 0x97, 0x67, 0xe8, 0xd3, 0x47, 0xc5, 0xf4, 0x99, 0x60, 0x7d, 0xde, 0x8f, + 0xeb, 0x93, 0xea, 0xec, 0xd0, 0xb6, 0xbb, 0x6d, 0xb6, 0xc5, 0xec, 0x49, 0x27, 0x47, 0x6b, 0x70, + 0xd7, 0xca, 0x1f, 0x40, 0x61, 0xd3, 0xb2, 0xcc, 0xb1, 0x3b, 0x26, 0x02, 0xc9, 0x67, 0xf6, 0xc4, + 0xe1, 0x6f, 0x4b, 0xf8, 0x9b, 0x94, 0x20, 0x39, 0xb2, 0xc7, 0x0e, 0x9b, 0x67, 0x3d, 0xa9, 0xaf, + 0xae, 0xae, 0x1a, 0x58, 0x43, 0x2e, 0x41, 0xee, 0xc0, 0xb6, 0x2c, 0xf3, 0x80, 0x4e, 0x22, 0x81, + 0x69, 0x8d, 0x5f, 0x51, 0xfe, 0x65, 0x09, 0x0a, 0x2d, 0xe7, 0x99, 0x6f, 0x5c, 0x81, 0xc4, 0x73, + 0xf3, 0x04, 0x87, 0x97, 0x30, 0xe8, 0x4f, 0x7a, 0x54, 0x7e, 0xbe, 0x33, 0x38, 0x62, 0x6f, 0x4d, + 0x05, 0x83, 0x15, 0xc8, 0x05, 0x48, 0xbf, 0x34, 0xfb, 0xbd, 0x67, 0x0e, 0xda, 0x94, 0x0d, 0x5e, + 0x22, 0xb7, 0x20, 0xd5, 0xa7, 0x83, 0x2d, 0x25, 0x71, 0xbd, 0x2e, 0xf8, 0xeb, 0x25, 0xce, 0xc1, + 0x60, 0xa0, 0x1b, 0xd9, 0x6c, 0x57, 0xf9, 0xe8, 0xa3, 0x8f, 0x3e, 0x92, 0xcb, 0x87, 0xb0, 0xe8, + 0x1e, 0xde, 0xc0, 0x64, 0xb7, 0xa1, 0x34, 0x30, 0xed, 0xf6, 0x61, 0xdf, 0xea, 0x0c, 0x06, 0x27, + 0xed, 0x97, 0xb6, 0xd5, 0xee, 0x58, 0x6d, 0x7b, 0x72, 0xd0, 0x19, 0xe3, 0x02, 0x44, 0x77, 0xb1, + 0x38, 0x30, 0xed, 0x26, 0xa3, 0xbd, 0x6f, 0x5b, 0x0f, 0xac, 0x16, 0xe5, 0x94, 0xff, 0x20, 0x09, + 0xb9, 0xad, 0x13, 0xd7, 0xfa, 0x22, 0xa4, 0x0e, 0xec, 0x23, 0x8b, 0xad, 0x65, 0xca, 0x60, 0x05, + 0x6f, 0x8f, 0x64, 0x61, 0x8f, 0x16, 0x21, 0xf5, 0xe2, 0xc8, 0x76, 0x4c, 0x9c, 0x6e, 0xce, 
0x60, + 0x05, 0xba, 0x5a, 0x23, 0xd3, 0x29, 0x25, 0x31, 0xb9, 0xa5, 0x3f, 0xfd, 0xf9, 0xa7, 0xce, 0x30, + 0x7f, 0xb2, 0x02, 0x69, 0x9b, 0xae, 0xfe, 0xa4, 0x94, 0xc6, 0x77, 0x35, 0x01, 0x2e, 0xee, 0x8a, + 0xc1, 0x51, 0x64, 0x13, 0x16, 0x5e, 0x9a, 0xed, 0xe1, 0xd1, 0xc4, 0x69, 0xf7, 0xec, 0x76, 0xd7, + 0x34, 0x47, 0xe6, 0xb8, 0x34, 0x87, 0x3d, 0x09, 0x3e, 0x61, 0xd6, 0x42, 0x1a, 0xf3, 0x2f, 0xcd, + 0xad, 0xa3, 0x89, 0xb3, 0x61, 0x3f, 0x46, 0x16, 0xa9, 0x42, 0x6e, 0x6c, 0x52, 0x4f, 0x40, 0x07, + 0x5b, 0x08, 0xf7, 0x1e, 0xa0, 0x66, 0xc7, 0xe6, 0x08, 0x2b, 0xc8, 0x3a, 0x64, 0xf7, 0xfb, 0xcf, + 0xcd, 0xc9, 0x33, 0xb3, 0x5b, 0xca, 0xa8, 0x52, 0x65, 0x5e, 0xbb, 0xe8, 0x73, 0xbc, 0x65, 0x5d, + 0x79, 0x64, 0x0f, 0xec, 0xb1, 0xe1, 0x41, 0xc9, 0x7d, 0xc8, 0x4d, 0xec, 0xa1, 0xc9, 0xf4, 0x9d, + 0xc5, 0xa0, 0x7a, 0x79, 0x16, 0x6f, 0xd7, 0x1e, 0x9a, 0xae, 0x07, 0x73, 0xf1, 0x64, 0x99, 0x0d, + 0x74, 0x9f, 0x5e, 0x9d, 0x4b, 0x80, 0x4f, 0x03, 0x74, 0x40, 0x78, 0x95, 0x26, 0x4b, 0x74, 0x40, + 0xbd, 0x43, 0x7a, 0x23, 0x2a, 0xe5, 0x31, 0xaf, 0xf4, 0xca, 0x4b, 0xb7, 0x20, 0xe7, 0x19, 0xf4, + 0x5d, 0x1f, 0x73, 0x37, 0x39, 0xf4, 0x07, 0xcc, 0xf5, 0x31, 0x5f, 0xf3, 0x06, 0xa4, 0x70, 0xd8, + 0x34, 0x42, 0x19, 0x0d, 0x1a, 0x10, 0x73, 0x90, 0xda, 0x30, 0x1a, 0x8d, 0x6d, 0x45, 0xc2, 0xd8, + 0xf8, 0xf4, 0xdd, 0x86, 0x22, 0x0b, 0x8a, 0xfd, 0x6d, 0x09, 0x12, 0x8d, 0x63, 0x54, 0x0b, 0x9d, + 0x86, 0x7b, 0xa2, 0xe9, 0x6f, 0xad, 0x06, 0xc9, 0xa1, 0x3d, 0x36, 0xc9, 0xf9, 0x19, 0xb3, 0x2c, + 0xf5, 0x70, 0xbf, 0x84, 0x57, 0xe4, 0xc6, 0xb1, 0x63, 0x20, 0x5e, 0x7b, 0x0b, 0x92, 0x8e, 0x79, + 0xec, 0xcc, 0xe6, 0x3d, 0x63, 0x1d, 0x50, 0x80, 0x76, 0x13, 0xd2, 0xd6, 0xd1, 0x70, 0xdf, 0x1c, + 0xcf, 0x86, 0xf6, 0x71, 0x7a, 0x1c, 0x52, 0x7e, 0x0f, 0x94, 0x47, 0xf6, 0x70, 0x34, 0x30, 0x8f, + 0x1b, 0xc7, 0x8e, 0x69, 0x4d, 0xfa, 0xb6, 0x45, 0xf5, 0x7c, 0xd8, 0x1f, 0xa3, 0x17, 0xc1, 0xb7, + 0x62, 0x2c, 0xd0, 0x53, 0x3d, 0x31, 0x0f, 0x6c, 0xab, 0xcb, 0x1d, 0x26, 0x2f, 0x51, 0xb4, 0xf3, + 0xac, 0x3f, 0xa6, 0x0e, 0x84, 0xfa, 0x79, 0x56, 0x28, 0x6f, 0x40, 0x91, 0xe7, 0x18, 0x13, 0xde, + 0x71, 0xf9, 0x06, 0x14, 0xdc, 0x2a, 0x7c, 0x38, 0xcf, 0x42, 0xf2, 0x83, 0x86, 0xd1, 0x52, 0xce, + 0xd1, 0x65, 0x6d, 0x6d, 0x37, 0x14, 0x89, 0xfe, 0xd8, 0x7b, 0xbf, 0x15, 0x58, 0xca, 0x4b, 0x50, + 0xf0, 0xc6, 0xbe, 0x6b, 0x3a, 0xd8, 0x42, 0x03, 0x42, 0xa6, 0x2e, 0x67, 0xa5, 0x72, 0x06, 0x52, + 0x8d, 0xe1, 0xc8, 0x39, 0x29, 0xff, 0x22, 0xe4, 0x39, 0xe8, 0x69, 0x7f, 0xe2, 0x90, 0x3b, 0x90, + 0x19, 0xf2, 0xf9, 0x4a, 0x78, 0xdd, 0x13, 0x35, 0xe5, 0xe3, 0xdc, 0xdf, 0x86, 0x8b, 0x5e, 0xaa, + 0x42, 0x46, 0xf0, 0xa5, 0xfc, 0xa8, 0xcb, 0xe2, 0x51, 0x67, 0x4e, 0x21, 0x21, 0x38, 0x85, 0xf2, + 0x16, 0x64, 0x58, 0x04, 0x9c, 0x60, 0x54, 0x67, 0xa9, 0x22, 0x13, 0x13, 0xdb, 0xf9, 0x3c, 0xab, + 0x63, 0x17, 0x95, 0xab, 0x90, 0x47, 0xc1, 0x72, 0x04, 0x73, 0x9d, 0x80, 0x55, 0x4c, 0x6e, 0xbf, + 0x9f, 0x82, 0xac, 0xbb, 0x52, 0x64, 0x19, 0xd2, 0x2c, 0x3f, 0x43, 0x53, 0xee, 0xfb, 0x41, 0x0a, + 0x33, 0x32, 0xb2, 0x0c, 0x19, 0x9e, 0x83, 0x71, 0xef, 0x2e, 0x57, 0x35, 0x23, 0xcd, 0x72, 0x2e, + 0xaf, 0xb1, 0xa6, 0xa3, 0x63, 0x62, 0x2f, 0x03, 0x69, 0x96, 0x55, 0x11, 0x15, 0x72, 0x5e, 0x1e, + 0x85, 0xfe, 0x98, 0x3f, 0x03, 0x64, 0xdd, 0xc4, 0x49, 0x40, 0xd4, 0x74, 0xf4, 0x58, 0x3c, 0xe7, + 0xcf, 0x36, 0xfd, 0xeb, 0x49, 0xd6, 0xcd, 0x86, 0xf0, 0xf9, 0xde, 0x4d, 0xf0, 0x33, 0x3c, 0xff, + 0xf1, 0x01, 0x35, 0x1d, 0x5d, 0x82, 0x9b, 0xcd, 0x67, 0x78, 0x8e, 0x43, 0xae, 0xd2, 0x21, 0x62, + 0xce, 0x82, 0x47, 0xdf, 0x4f, 0xdd, 0xd3, 0x2c, 0x93, 0x21, 0xd7, 0xa8, 0x05, 0x96, 0x98, 0xe0, + 0xb9, 0xf4, 0xf3, 
0xf4, 0x0c, 0xcf, 0x57, 0xc8, 0x4d, 0x0a, 0x61, 0xcb, 0x5f, 0x82, 0x88, 0xa4, + 0x3c, 0xc3, 0x93, 0x72, 0xa2, 0xd2, 0x0e, 0xd1, 0x3d, 0xa0, 0x4b, 0x10, 0x12, 0xf0, 0x34, 0x4b, + 0xc0, 0xc9, 0x15, 0x34, 0xc7, 0x26, 0x55, 0xf0, 0x93, 0xed, 0x0c, 0x4f, 0x70, 0xfc, 0x76, 0xbc, + 0xb2, 0x79, 0x89, 0x75, 0x86, 0xa7, 0x30, 0xa4, 0x46, 0xf7, 0x8b, 0xea, 0xbb, 0x34, 0x8f, 0x4e, + 0xb0, 0xe4, 0x0b, 0xcf, 0xdd, 0x53, 0xe6, 0x03, 0xeb, 0xcc, 0x83, 0x18, 0xa9, 0x26, 0x9e, 0x86, + 0x25, 0xca, 0xdb, 0xe9, 0x5b, 0x87, 0xa5, 0x22, 0xae, 0x44, 0xa2, 0x6f, 0x1d, 0x1a, 0xa9, 0x26, + 0xad, 0x61, 0x1a, 0xd8, 0xa6, 0x6d, 0x0a, 0xb6, 0x25, 0x6f, 0xb3, 0x46, 0x5a, 0x45, 0x4a, 0x90, + 0x6a, 0xb6, 0xb7, 0x3b, 0x56, 0x69, 0x81, 0xf1, 0xac, 0x8e, 0x65, 0x24, 0x9b, 0xdb, 0x1d, 0x8b, + 0xbc, 0x05, 0x89, 0xc9, 0xd1, 0x7e, 0x89, 0x84, 0xbf, 0xac, 0xec, 0x1e, 0xed, 0xbb, 0x43, 0x31, + 0x28, 0x82, 0x2c, 0x43, 0x76, 0xe2, 0x8c, 0xdb, 0xbf, 0x60, 0x8e, 0xed, 0xd2, 0x79, 0x5c, 0xc2, + 0x73, 0x46, 0x66, 0xe2, 0x8c, 0x3f, 0x30, 0xc7, 0xf6, 0x19, 0x9d, 0x5f, 0xf9, 0x0a, 0xe4, 0x05, + 0xbb, 0xa4, 0x08, 0x92, 0xc5, 0x6e, 0x0a, 0x75, 0xe9, 0x8e, 0x21, 0x59, 0xe5, 0x3d, 0x28, 0xb8, + 0x39, 0x0c, 0xce, 0x57, 0xa3, 0x27, 0x69, 0x60, 0x8f, 0xf1, 0x7c, 0xce, 0x6b, 0x97, 0xc4, 0x10, + 0xe5, 0xc3, 0x78, 0xb8, 0x60, 0xd0, 0xb2, 0x12, 0x1a, 0x8a, 0x54, 0xfe, 0xa1, 0x04, 0x85, 0x2d, + 0x7b, 0xec, 0x3f, 0x30, 0x2f, 0x42, 0x6a, 0xdf, 0xb6, 0x07, 0x13, 0x34, 0x9b, 0x35, 0x58, 0x81, + 0xbc, 0x01, 0x05, 0xfc, 0xe1, 0xe6, 0x9e, 0xb2, 0xf7, 0xb4, 0x91, 0xc7, 0x7a, 0x9e, 0x70, 0x12, + 0x48, 0xf6, 0x2d, 0x67, 0xc2, 0x3d, 0x19, 0xfe, 0x26, 0x5f, 0x80, 0x3c, 0xfd, 0xeb, 0x32, 0x93, + 0xde, 0x85, 0x15, 0x68, 0x35, 0x27, 0xbe, 0x05, 0x73, 0xb8, 0xfb, 0x1e, 0x2c, 0xe3, 0x3d, 0x63, + 0x14, 0x58, 0x03, 0x07, 0x96, 0x20, 0xc3, 0x5c, 0xc1, 0x04, 0xbf, 0x96, 0xe5, 0x0c, 0xb7, 0x48, + 0xdd, 0x2b, 0x66, 0x02, 0x2c, 0xdc, 0x67, 0x0c, 0x5e, 0x2a, 0x3f, 0x80, 0x2c, 0x46, 0xa9, 0xd6, + 0xa0, 0x4b, 0xca, 0x20, 0xf5, 0x4a, 0x26, 0xc6, 0xc8, 0x45, 0xe1, 0x9a, 0xcf, 0x9b, 0x57, 0x36, + 0x0c, 0xa9, 0xb7, 0xb4, 0x00, 0xd2, 0x06, 0xbd, 0x77, 0x1f, 0x73, 0x37, 0x2d, 0x1d, 0x97, 0x5b, + 0xdc, 0xc4, 0xb6, 0xf9, 0x32, 0xce, 0xc4, 0xb6, 0xf9, 0x92, 0x99, 0xb8, 0x3a, 0x65, 0x82, 0x96, + 0x4e, 0xf8, 0xa7, 0x43, 0xe9, 0xa4, 0x5c, 0x85, 0x39, 0x3c, 0x9e, 0x7d, 0xab, 0xb7, 0x63, 0xf7, + 0x2d, 0xbc, 0xe7, 0x1f, 0xe2, 0x3d, 0x49, 0x32, 0xa4, 0x43, 0xba, 0x07, 0xe6, 0x71, 0xe7, 0x80, + 0xdd, 0x38, 0xb3, 0x06, 0x2b, 0x94, 0x3f, 0x4b, 0xc2, 0x3c, 0x77, 0xad, 0xef, 0xf7, 0x9d, 0x67, + 0x5b, 0x9d, 0x11, 0x79, 0x0a, 0x05, 0xea, 0x55, 0xdb, 0xc3, 0xce, 0x68, 0x44, 0x8f, 0xaf, 0x84, + 0x57, 0x8d, 0xeb, 0x53, 0xae, 0x9a, 0xe3, 0x57, 0xb6, 0x3b, 0x43, 0x73, 0x8b, 0x61, 0x1b, 0x96, + 0x33, 0x3e, 0x31, 0xf2, 0x96, 0x5f, 0x43, 0x36, 0x21, 0x3f, 0x9c, 0xf4, 0x3c, 0x63, 0x32, 0x1a, + 0xab, 0x44, 0x1a, 0xdb, 0x9a, 0xf4, 0x02, 0xb6, 0x60, 0xe8, 0x55, 0xd0, 0x81, 0x51, 0x7f, 0xec, + 0xd9, 0x4a, 0x9c, 0x32, 0x30, 0xea, 0x3a, 0x82, 0x03, 0xdb, 0xf7, 0x6b, 0xc8, 0x63, 0x00, 0x7a, + 0xbc, 0x1c, 0x9b, 0xa6, 0x4e, 0xa8, 0xa0, 0xbc, 0xf6, 0x66, 0xa4, 0xad, 0x5d, 0x67, 0xbc, 0x67, + 0xef, 0x3a, 0x63, 0x66, 0x88, 0x1e, 0x4c, 0x2c, 0x2e, 0xbd, 0x03, 0x4a, 0x78, 0xfe, 0xe2, 0x8d, + 0x3c, 0x35, 0xe3, 0x46, 0x9e, 0xe3, 0x37, 0xf2, 0xba, 0x7c, 0x57, 0x5a, 0x7a, 0x0f, 0x8a, 0xa1, + 0x29, 0x8b, 0x74, 0xc2, 0xe8, 0xb7, 0x45, 0x7a, 0x5e, 0x7b, 0x5d, 0xf8, 0x9c, 0x2d, 0x6e, 0xb8, + 0x68, 0xf7, 0x1d, 0x50, 0xc2, 0xd3, 0x17, 0x0d, 0x67, 0x63, 0x32, 0x05, 0xe4, 0xdf, 0x87, 0xb9, + 0xc0, 0x94, 0x45, 0x72, 0xee, 0x94, 0x49, 
0x95, 0x7f, 0x29, 0x05, 0xa9, 0x96, 0x65, 0xda, 0x87, + 0xe4, 0xf5, 0x60, 0x9c, 0x7c, 0x72, 0xce, 0x8d, 0x91, 0x17, 0x43, 0x31, 0xf2, 0xc9, 0x39, 0x2f, + 0x42, 0x5e, 0x0c, 0x45, 0x48, 0xb7, 0xa9, 0xa6, 0x93, 0xcb, 0x53, 0xf1, 0xf1, 0xc9, 0x39, 0x21, + 0x38, 0x5e, 0x9e, 0x0a, 0x8e, 0x7e, 0x73, 0x4d, 0xa7, 0x0e, 0x35, 0x18, 0x19, 0x9f, 0x9c, 0xf3, + 0xa3, 0xe2, 0x72, 0x38, 0x2a, 0x7a, 0x8d, 0x35, 0x9d, 0x0d, 0x49, 0x88, 0x88, 0x38, 0x24, 0x16, + 0x0b, 0x97, 0xc3, 0xb1, 0x10, 0x79, 0x3c, 0x0a, 0x2e, 0x87, 0xa3, 0x20, 0x36, 0xf2, 0xa8, 0x77, + 0x31, 0x14, 0xf5, 0xd0, 0x28, 0x0b, 0x77, 0xcb, 0xe1, 0x70, 0xc7, 0x78, 0xc2, 0x48, 0xc5, 0x58, + 0xe7, 0x35, 0xd6, 0x74, 0xa2, 0x85, 0x02, 0x5d, 0xf4, 0x6d, 0x1f, 0xf7, 0x02, 0x9d, 0xbe, 0x4e, + 0x97, 0xcd, 0xbd, 0x88, 0x16, 0x63, 0xbe, 0xf8, 0xe3, 0x6a, 0xba, 0x17, 0x31, 0x0d, 0x32, 0x87, + 0x3c, 0x01, 0x56, 0xd0, 0x73, 0x09, 0xb2, 0xc4, 0xcd, 0x5f, 0x69, 0xb6, 0xd1, 0x83, 0xd1, 0x79, + 0x1d, 0xb2, 0x3b, 0x7d, 0x05, 0xe6, 0x9a, 0xed, 0xa7, 0x9d, 0x71, 0xcf, 0x9c, 0x38, 0xed, 0xbd, + 0x4e, 0xcf, 0x7b, 0x44, 0xa0, 0xfb, 0x9f, 0x6f, 0xf2, 0x96, 0xbd, 0x4e, 0x8f, 0x5c, 0x70, 0xc5, + 0xd5, 0xc5, 0x56, 0x89, 0xcb, 0x6b, 0xe9, 0x75, 0xba, 0x68, 0xcc, 0x18, 0xfa, 0xc2, 0x05, 0xee, + 0x0b, 0x1f, 0x66, 0x20, 0x75, 0x64, 0xf5, 0x6d, 0xeb, 0x61, 0x0e, 0x32, 0x8e, 0x3d, 0x1e, 0x76, + 0x1c, 0xbb, 0xfc, 0x23, 0x09, 0xe0, 0x91, 0x3d, 0x1c, 0x1e, 0x59, 0xfd, 0x17, 0x47, 0x26, 0xb9, + 0x02, 0xf9, 0x61, 0xe7, 0xb9, 0xd9, 0x1e, 0x9a, 0xed, 0x83, 0xb1, 0x7b, 0x0e, 0x72, 0xb4, 0x6a, + 0xcb, 0x7c, 0x34, 0x3e, 0x21, 0x25, 0xf7, 0x8a, 0x8e, 0xda, 0x41, 0x49, 0xf2, 0x2b, 0xfb, 0x22, + 0xbf, 0x74, 0xa6, 0xf9, 0x1e, 0xba, 0xd7, 0x4e, 0x96, 0x47, 0x64, 0xf8, 0xee, 0x61, 0x89, 0x4a, + 0xde, 0x31, 0x87, 0xa3, 0xf6, 0x01, 0x4a, 0x85, 0xca, 0x21, 0x45, 0xcb, 0x8f, 0xc8, 0x6d, 0x48, + 0x1c, 0xd8, 0x03, 0x14, 0xc9, 0x29, 0xfb, 0x42, 0x71, 0xe4, 0x0d, 0x48, 0x0c, 0x27, 0x4c, 0x36, + 0x79, 0x6d, 0x41, 0xb8, 0x27, 0xb0, 0xd0, 0x44, 0x61, 0xc3, 0x49, 0xcf, 0x9b, 0xf7, 0x8d, 0x22, + 0x24, 0x9a, 0xad, 0x16, 0x8d, 0xfd, 0xcd, 0x56, 0x6b, 0x4d, 0x91, 0xea, 0x5f, 0x82, 0x6c, 0x6f, + 0x6c, 0x9a, 0xd4, 0x3d, 0xcc, 0xce, 0x39, 0x3e, 0xc4, 0x58, 0xe7, 0x81, 0xea, 0x5b, 0x90, 0x39, + 0x60, 0x59, 0x07, 0x89, 0x48, 0x6b, 0x4b, 0x7f, 0xc8, 0x1e, 0x55, 0x96, 0xfc, 0xe6, 0x70, 0x9e, + 0x62, 0xb8, 0x36, 0xea, 0x3b, 0x90, 0x1b, 0xb7, 0x4f, 0x33, 0xf8, 0x31, 0x8b, 0x2e, 0x71, 0x06, + 0xb3, 0x63, 0x5e, 0x55, 0x6f, 0xc0, 0x82, 0x65, 0xbb, 0xdf, 0x50, 0xda, 0x5d, 0x76, 0xc6, 0x2e, + 0x4e, 0x5f, 0xe5, 0x5c, 0xe3, 0x26, 0xfb, 0x6e, 0x69, 0xd9, 0xbc, 0x81, 0x9d, 0xca, 0xfa, 0x23, + 0x50, 0x04, 0x33, 0x98, 0x7a, 0xc6, 0x59, 0x39, 0x64, 0x1f, 0x4a, 0x3d, 0x2b, 0x78, 0xee, 0x43, + 0x46, 0xd8, 0xc9, 0x8c, 0x31, 0xd2, 0x63, 0x5f, 0x9d, 0x3d, 0x23, 0xe8, 0xea, 0xa6, 0x8d, 0x50, + 0x5f, 0x13, 0x6d, 0xe4, 0x19, 0xfb, 0x20, 0x2d, 0x1a, 0xa9, 0xe9, 0xa1, 0x55, 0x39, 0x3a, 0x75, + 0x28, 0x7d, 0xf6, 0x3d, 0xd9, 0xb3, 0xc2, 0x1c, 0xe0, 0x0c, 0x33, 0xf1, 0x83, 0xf9, 0x90, 0x7d, + 0x6a, 0x0e, 0x98, 0x99, 0x1a, 0xcd, 0xe4, 0xd4, 0xd1, 0x3c, 0x67, 0xdf, 0x75, 0x3d, 0x33, 0xbb, + 0xb3, 0x46, 0x33, 0x39, 0x75, 0x34, 0x03, 0xf6, 0xc5, 0x37, 0x60, 0xa6, 0xa6, 0xd7, 0x37, 0x80, + 0x88, 0x5b, 0xcd, 0xe3, 0x44, 0x8c, 0x9d, 0x21, 0xfb, 0x8e, 0xef, 0x6f, 0x36, 0xa3, 0xcc, 0x32, + 0x14, 0x3f, 0x20, 0x8b, 0x7d, 0xe2, 0x0f, 0x1a, 0xaa, 0xe9, 0xf5, 0x4d, 0x38, 0x2f, 0x4e, 0xec, + 0x0c, 0x43, 0xb2, 0x55, 0xa9, 0x52, 0x34, 0x16, 0xfc, 0xa9, 0x71, 0xce, 0x4c, 0x53, 0xf1, 0x83, + 0x1a, 0xa9, 0x52, 0x45, 0x99, 0x32, 0x55, 0xd3, 0xeb, 0x0f, 0xa0, 
0x28, 0x98, 0xda, 0xc7, 0x08, + 0x1d, 0x6d, 0xe6, 0x05, 0xfb, 0x5f, 0x0b, 0xcf, 0x0c, 0x8d, 0xe8, 0xe1, 0x1d, 0xe3, 0x31, 0x2e, + 0xda, 0xc8, 0x98, 0xfd, 0xa3, 0x80, 0x3f, 0x16, 0x64, 0x84, 0x8e, 0x04, 0xe6, 0xdf, 0x71, 0x56, + 0x26, 0xec, 0x5f, 0x08, 0xfc, 0xa1, 0x50, 0x42, 0xbd, 0x1f, 0x98, 0x8e, 0x49, 0x83, 0x5c, 0x8c, + 0x0d, 0x07, 0x3d, 0xf2, 0x9b, 0x91, 0x80, 0x15, 0xf1, 0x81, 0x44, 0x98, 0x36, 0x2d, 0xd6, 0x37, + 0x61, 0xfe, 0xec, 0x0e, 0xe9, 0x63, 0x89, 0x65, 0xcb, 0xd5, 0x15, 0x9a, 0x50, 0x1b, 0x73, 0xdd, + 0x80, 0x5f, 0x6a, 0xc0, 0xdc, 0x99, 0x9d, 0xd2, 0x27, 0x12, 0xcb, 0x39, 0xa9, 0x25, 0xa3, 0xd0, + 0x0d, 0x7a, 0xa6, 0xb9, 0x33, 0xbb, 0xa5, 0x4f, 0x25, 0xf6, 0x40, 0xa1, 0x6b, 0x9e, 0x11, 0xd7, + 0x33, 0xcd, 0x9d, 0xd9, 0x2d, 0x7d, 0x95, 0x65, 0x94, 0xb2, 0x5e, 0x15, 0x8d, 0xa0, 0x2f, 0x98, + 0x3f, 0xbb, 0x5b, 0xfa, 0x9a, 0x84, 0x8f, 0x15, 0xb2, 0xae, 0x7b, 0xeb, 0xe2, 0x79, 0xa6, 0xf9, + 0xb3, 0xbb, 0xa5, 0xaf, 0x4b, 0xf8, 0xa4, 0x21, 0xeb, 0xeb, 0x01, 0x33, 0xc1, 0xd1, 0x9c, 0xee, + 0x96, 0xbe, 0x21, 0xe1, 0x2b, 0x83, 0xac, 0xd7, 0x3c, 0x33, 0xbb, 0x53, 0xa3, 0x39, 0xdd, 0x2d, + 0x7d, 0x13, 0x6f, 0xf1, 0x75, 0x59, 0xbf, 0x13, 0x30, 0x83, 0x9e, 0xa9, 0xf8, 0x0a, 0x6e, 0xe9, + 0x5b, 0x12, 0x3e, 0x06, 0xc9, 0xfa, 0x5d, 0xc3, 0xed, 0xdd, 0xf7, 0x4c, 0xc5, 0x57, 0x70, 0x4b, + 0x9f, 0x49, 0xf8, 0x66, 0x24, 0xeb, 0xf7, 0x82, 0x86, 0xd0, 0x33, 0x29, 0xaf, 0xe2, 0x96, 0xbe, + 0x4d, 0x2d, 0x15, 0xeb, 0xf2, 0xfa, 0xaa, 0xe1, 0x0e, 0x40, 0xf0, 0x4c, 0xca, 0xab, 0xb8, 0xa5, + 0xef, 0x50, 0x53, 0x4a, 0x5d, 0x5e, 0x5f, 0x0b, 0x99, 0xaa, 0xe9, 0xf5, 0x47, 0x50, 0x38, 0xab, + 0x5b, 0xfa, 0xae, 0xf8, 0x16, 0x97, 0xef, 0x0a, 0xbe, 0x69, 0x47, 0xd8, 0xb3, 0x53, 0x1d, 0xd3, + 0xf7, 0x30, 0xc7, 0xa9, 0xcf, 0x3d, 0x61, 0xef, 0x55, 0x8c, 0xe0, 0x6f, 0x1f, 0x73, 0x53, 0x5b, + 0xfe, 0xf9, 0x38, 0xd5, 0x47, 0x7d, 0x5f, 0xc2, 0x47, 0xad, 0x02, 0x37, 0x88, 0x78, 0xef, 0xa4, + 0x30, 0x87, 0xf5, 0xa1, 0x3f, 0xcb, 0xd3, 0xbc, 0xd5, 0x0f, 0xa4, 0x57, 0x71, 0x57, 0xf5, 0x44, + 0x6b, 0xbb, 0xe1, 0x2d, 0x06, 0xd6, 0xbc, 0x0d, 0xc9, 0x63, 0x6d, 0x75, 0x4d, 0xbc, 0x92, 0x89, + 0x6f, 0xb9, 0xcc, 0x49, 0xe5, 0xb5, 0xa2, 0xf0, 0xdc, 0x3d, 0x1c, 0x39, 0x27, 0x06, 0xb2, 0x38, + 0x5b, 0x8b, 0x64, 0x7f, 0x12, 0xc3, 0xd6, 0x38, 0xbb, 0x1a, 0xc9, 0xfe, 0x34, 0x86, 0x5d, 0xe5, + 0x6c, 0x3d, 0x92, 0xfd, 0xd5, 0x18, 0xb6, 0xce, 0xd9, 0xeb, 0x91, 0xec, 0xaf, 0xc5, 0xb0, 0xd7, + 0x39, 0xbb, 0x16, 0xc9, 0xfe, 0x7a, 0x0c, 0xbb, 0xc6, 0xd9, 0x77, 0x22, 0xd9, 0xdf, 0x88, 0x61, + 0xdf, 0xe1, 0xec, 0xbb, 0x91, 0xec, 0x6f, 0xc6, 0xb0, 0xef, 0x72, 0xf6, 0xbd, 0x48, 0xf6, 0xb7, + 0x62, 0xd8, 0xf7, 0x18, 0x7b, 0x6d, 0x35, 0x92, 0xfd, 0x59, 0x34, 0x7b, 0x6d, 0x95, 0xb3, 0xa3, + 0xb5, 0xf6, 0xed, 0x18, 0x36, 0xd7, 0xda, 0x5a, 0xb4, 0xd6, 0xbe, 0x13, 0xc3, 0xe6, 0x5a, 0x5b, + 0x8b, 0xd6, 0xda, 0x77, 0x63, 0xd8, 0x5c, 0x6b, 0x6b, 0xd1, 0x5a, 0xfb, 0x5e, 0x0c, 0x9b, 0x6b, + 0x6d, 0x2d, 0x5a, 0x6b, 0xdf, 0x8f, 0x61, 0x73, 0xad, 0xad, 0x45, 0x6b, 0xed, 0x07, 0x31, 0x6c, + 0xae, 0xb5, 0xb5, 0x68, 0xad, 0xfd, 0x51, 0x0c, 0x9b, 0x6b, 0x6d, 0x2d, 0x5a, 0x6b, 0x7f, 0x1c, + 0xc3, 0xe6, 0x5a, 0x5b, 0x8b, 0xd6, 0xda, 0x9f, 0xc4, 0xb0, 0xb9, 0xd6, 0xb4, 0x68, 0xad, 0xfd, + 0x69, 0x34, 0x5b, 0xe3, 0x5a, 0xd3, 0xa2, 0xb5, 0xf6, 0x67, 0x31, 0x6c, 0xae, 0x35, 0x2d, 0x5a, + 0x6b, 0x7f, 0x1e, 0xc3, 0xe6, 0x5a, 0xd3, 0xa2, 0xb5, 0xf6, 0xc3, 0x18, 0x36, 0xd7, 0x9a, 0x16, + 0xad, 0xb5, 0xbf, 0x88, 0x61, 0x73, 0xad, 0x69, 0xd1, 0x5a, 0xfb, 0xcb, 0x18, 0x36, 0xd7, 0x9a, + 0x16, 0xad, 0xb5, 0xbf, 0x8a, 0x61, 0x73, 0xad, 0x69, 0xd1, 0x5a, 0xfb, 0xeb, 0x18, 0x36, 
0xd7, + 0x9a, 0x16, 0xad, 0xb5, 0xbf, 0x89, 0x61, 0x73, 0xad, 0x69, 0xd1, 0x5a, 0xfb, 0xdb, 0x18, 0x36, + 0xd7, 0x5a, 0x35, 0x5a, 0x6b, 0x7f, 0x17, 0xcd, 0xae, 0x72, 0xad, 0x55, 0xa3, 0xb5, 0xf6, 0xf7, + 0x31, 0x6c, 0xae, 0xb5, 0x6a, 0xb4, 0xd6, 0xfe, 0x21, 0x86, 0xcd, 0xb5, 0x56, 0x8d, 0xd6, 0xda, + 0x3f, 0xc6, 0xb0, 0xb9, 0xd6, 0xaa, 0xd1, 0x5a, 0xfb, 0x51, 0x0c, 0x9b, 0x6b, 0xad, 0x1a, 0xad, + 0xb5, 0x7f, 0x8a, 0x61, 0x73, 0xad, 0x55, 0xa3, 0xb5, 0xf6, 0xcf, 0x31, 0x6c, 0xae, 0xb5, 0x6a, + 0xb4, 0xd6, 0xfe, 0x25, 0x86, 0xcd, 0xb5, 0x56, 0x8d, 0xd6, 0xda, 0xbf, 0xc6, 0xb0, 0xb9, 0xd6, + 0xaa, 0xd1, 0x5a, 0xfb, 0xb7, 0x18, 0x36, 0xd7, 0x9a, 0x1e, 0xad, 0xb5, 0x7f, 0x8f, 0x66, 0xeb, + 0x5c, 0x6b, 0x7a, 0xb4, 0xd6, 0xfe, 0x23, 0x86, 0xcd, 0xb5, 0xa6, 0x47, 0x6b, 0xed, 0x3f, 0x63, + 0xd8, 0x5c, 0x6b, 0x7a, 0xb4, 0xd6, 0xfe, 0x2b, 0x86, 0xcd, 0xb5, 0xa6, 0x47, 0x6b, 0xed, 0xbf, + 0x63, 0xd8, 0x5c, 0x6b, 0x7a, 0xb4, 0xd6, 0xfe, 0x27, 0x86, 0xcd, 0xb5, 0xa6, 0x47, 0x6b, 0xed, + 0xc7, 0x31, 0x6c, 0xae, 0x35, 0x3d, 0x5a, 0x6b, 0x3f, 0x89, 0x61, 0x73, 0xad, 0xe9, 0xd1, 0x5a, + 0xfb, 0xdf, 0x18, 0x36, 0xd7, 0x9a, 0x1e, 0xad, 0xb5, 0xff, 0x8b, 0x61, 0x73, 0xad, 0xad, 0x47, + 0x6b, 0xed, 0xff, 0xa3, 0xd9, 0xeb, 0xab, 0x3f, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xaa, 0x00, 0xcd, + 0x32, 0x57, 0x39, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/proto/testdata/test.proto b/vendor/github.com/golang/protobuf/proto/testdata/test.proto new file mode 100644 index 000000000..70e3cfcda --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/testdata/test.proto @@ -0,0 +1,548 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// A feature-rich test file for the protocol compiler and libraries. 
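
For orientation: the generated test.pb.go above exposes each oneof arm of Communique as a small wrapper struct assigned to the Union field (encoded by _Communique_OneofMarshaler and decoded by _Communique_OneofUnmarshaler), and exposes every extension as a *proto.ExtensionDesc value (E_Greeting, E_DefaultDouble, E_X201, and so on) that is registered in init(). The following is only an illustrative sketch of how that surface is exercised; it assumes the vendored packages are importable as github.com/golang/protobuf/proto and github.com/golang/protobuf/proto/testdata, and the literal values are made up.

package main

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/proto"
	pb "github.com/golang/protobuf/proto/testdata"
)

func main() {
	// Selecting a oneof arm: assigning one of the generated wrapper types to
	// Union picks the arm, and the oneof marshaler encodes only that field.
	c := &pb.Communique{Union: &pb.Communique_Name{Name: "hello"}}

	// Attaching an extension: E_Greeting corresponds to
	// "repeated string greeting = 106" on MyMessage, so its value is a
	// []string. Count is MyMessage's only required field, so it must be set
	// before marshaling.
	m := &pb.MyMessage{Count: proto.Int32(4)}
	if err := proto.SetExtension(m, pb.E_Greeting, []string{"hello", "hola"}); err != nil {
		log.Fatal(err)
	}

	for _, msg := range []proto.Message{c, m} {
		buf, err := proto.Marshal(msg)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%T encodes to %d bytes\n", msg, len(buf))
	}
}

The same descriptors drive decoding: after an Unmarshal round trip, proto.GetExtension(m, pb.E_Greeting) hands the []string back, and a type switch on c.Union recovers whichever oneof arm was set.
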
+ +syntax = "proto2"; + +package testdata; + +enum FOO { FOO1 = 1; }; + +message GoEnum { + required FOO foo = 1; +} + +message GoTestField { + required string Label = 1; + required string Type = 2; +} + +message GoTest { + // An enum, for completeness. + enum KIND { + VOID = 0; + + // Basic types + BOOL = 1; + BYTES = 2; + FINGERPRINT = 3; + FLOAT = 4; + INT = 5; + STRING = 6; + TIME = 7; + + // Groupings + TUPLE = 8; + ARRAY = 9; + MAP = 10; + + // Table types + TABLE = 11; + + // Functions + FUNCTION = 12; // last tag + }; + + // Some typical parameters + required KIND Kind = 1; + optional string Table = 2; + optional int32 Param = 3; + + // Required, repeated and optional foreign fields. + required GoTestField RequiredField = 4; + repeated GoTestField RepeatedField = 5; + optional GoTestField OptionalField = 6; + + // Required fields of all basic types + required bool F_Bool_required = 10; + required int32 F_Int32_required = 11; + required int64 F_Int64_required = 12; + required fixed32 F_Fixed32_required = 13; + required fixed64 F_Fixed64_required = 14; + required uint32 F_Uint32_required = 15; + required uint64 F_Uint64_required = 16; + required float F_Float_required = 17; + required double F_Double_required = 18; + required string F_String_required = 19; + required bytes F_Bytes_required = 101; + required sint32 F_Sint32_required = 102; + required sint64 F_Sint64_required = 103; + + // Repeated fields of all basic types + repeated bool F_Bool_repeated = 20; + repeated int32 F_Int32_repeated = 21; + repeated int64 F_Int64_repeated = 22; + repeated fixed32 F_Fixed32_repeated = 23; + repeated fixed64 F_Fixed64_repeated = 24; + repeated uint32 F_Uint32_repeated = 25; + repeated uint64 F_Uint64_repeated = 26; + repeated float F_Float_repeated = 27; + repeated double F_Double_repeated = 28; + repeated string F_String_repeated = 29; + repeated bytes F_Bytes_repeated = 201; + repeated sint32 F_Sint32_repeated = 202; + repeated sint64 F_Sint64_repeated = 203; + + // Optional fields of all basic types + optional bool F_Bool_optional = 30; + optional int32 F_Int32_optional = 31; + optional int64 F_Int64_optional = 32; + optional fixed32 F_Fixed32_optional = 33; + optional fixed64 F_Fixed64_optional = 34; + optional uint32 F_Uint32_optional = 35; + optional uint64 F_Uint64_optional = 36; + optional float F_Float_optional = 37; + optional double F_Double_optional = 38; + optional string F_String_optional = 39; + optional bytes F_Bytes_optional = 301; + optional sint32 F_Sint32_optional = 302; + optional sint64 F_Sint64_optional = 303; + + // Default-valued fields of all basic types + optional bool F_Bool_defaulted = 40 [default=true]; + optional int32 F_Int32_defaulted = 41 [default=32]; + optional int64 F_Int64_defaulted = 42 [default=64]; + optional fixed32 F_Fixed32_defaulted = 43 [default=320]; + optional fixed64 F_Fixed64_defaulted = 44 [default=640]; + optional uint32 F_Uint32_defaulted = 45 [default=3200]; + optional uint64 F_Uint64_defaulted = 46 [default=6400]; + optional float F_Float_defaulted = 47 [default=314159.]; + optional double F_Double_defaulted = 48 [default=271828.]; + optional string F_String_defaulted = 49 [default="hello, \"world!\"\n"]; + optional bytes F_Bytes_defaulted = 401 [default="Bignose"]; + optional sint32 F_Sint32_defaulted = 402 [default = -32]; + optional sint64 F_Sint64_defaulted = 403 [default = -64]; + + // Packed repeated fields (no string or bytes). 
+ repeated bool F_Bool_repeated_packed = 50 [packed=true]; + repeated int32 F_Int32_repeated_packed = 51 [packed=true]; + repeated int64 F_Int64_repeated_packed = 52 [packed=true]; + repeated fixed32 F_Fixed32_repeated_packed = 53 [packed=true]; + repeated fixed64 F_Fixed64_repeated_packed = 54 [packed=true]; + repeated uint32 F_Uint32_repeated_packed = 55 [packed=true]; + repeated uint64 F_Uint64_repeated_packed = 56 [packed=true]; + repeated float F_Float_repeated_packed = 57 [packed=true]; + repeated double F_Double_repeated_packed = 58 [packed=true]; + repeated sint32 F_Sint32_repeated_packed = 502 [packed=true]; + repeated sint64 F_Sint64_repeated_packed = 503 [packed=true]; + + // Required, repeated, and optional groups. + required group RequiredGroup = 70 { + required string RequiredField = 71; + }; + + repeated group RepeatedGroup = 80 { + required string RequiredField = 81; + }; + + optional group OptionalGroup = 90 { + required string RequiredField = 91; + }; +} + +// For testing a group containing a required field. +message GoTestRequiredGroupField { + required group Group = 1 { + required int32 Field = 2; + }; +} + +// For testing skipping of unrecognized fields. +// Numbers are all big, larger than tag numbers in GoTestField, +// the message used in the corresponding test. +message GoSkipTest { + required int32 skip_int32 = 11; + required fixed32 skip_fixed32 = 12; + required fixed64 skip_fixed64 = 13; + required string skip_string = 14; + required group SkipGroup = 15 { + required int32 group_int32 = 16; + required string group_string = 17; + } +} + +// For testing packed/non-packed decoder switching. +// A serialized instance of one should be deserializable as the other. +message NonPackedTest { + repeated int32 a = 1; +} + +message PackedTest { + repeated int32 b = 1 [packed=true]; +} + +message MaxTag { + // Maximum possible tag number. + optional string last_field = 536870911; +} + +message OldMessage { + message Nested { + optional string name = 1; + } + optional Nested nested = 1; + + optional int32 num = 2; +} + +// NewMessage is wire compatible with OldMessage; +// imagine it as a future version. +message NewMessage { + message Nested { + optional string name = 1; + optional string food_group = 2; + } + optional Nested nested = 1; + + // This is an int32 in OldMessage. + optional int64 num = 2; +} + +// Smaller tests for ASCII formatting. + +message InnerMessage { + required string host = 1; + optional int32 port = 2 [default=4000]; + optional bool connected = 3; +} + +message OtherMessage { + optional int64 key = 1; + optional bytes value = 2; + optional float weight = 3; + optional InnerMessage inner = 4; + + extensions 100 to max; +} + +message RequiredInnerMessage { + required InnerMessage leo_finally_won_an_oscar = 1; +} + +message MyMessage { + required int32 count = 1; + optional string name = 2; + optional string quote = 3; + repeated string pet = 4; + optional InnerMessage inner = 5; + repeated OtherMessage others = 6; + optional RequiredInnerMessage we_must_go_deeper = 13; + repeated InnerMessage rep_inner = 12; + + enum Color { + RED = 0; + GREEN = 1; + BLUE = 2; + }; + optional Color bikeshed = 7; + + optional group SomeGroup = 8 { + optional int32 group_field = 9; + } + + // This field becomes [][]byte in the generated code. 
+ repeated bytes rep_bytes = 10; + + optional double bigfloat = 11; + + extensions 100 to max; +} + +message Ext { + extend MyMessage { + optional Ext more = 103; + optional string text = 104; + optional int32 number = 105; + } + + optional string data = 1; +} + +extend MyMessage { + repeated string greeting = 106; +} + +message ComplexExtension { + optional int32 first = 1; + optional int32 second = 2; + repeated int32 third = 3; +} + +extend OtherMessage { + optional ComplexExtension complex = 200; + repeated ComplexExtension r_complex = 201; +} + +message DefaultsMessage { + enum DefaultsEnum { + ZERO = 0; + ONE = 1; + TWO = 2; + }; + extensions 100 to max; +} + +extend DefaultsMessage { + optional double no_default_double = 101; + optional float no_default_float = 102; + optional int32 no_default_int32 = 103; + optional int64 no_default_int64 = 104; + optional uint32 no_default_uint32 = 105; + optional uint64 no_default_uint64 = 106; + optional sint32 no_default_sint32 = 107; + optional sint64 no_default_sint64 = 108; + optional fixed32 no_default_fixed32 = 109; + optional fixed64 no_default_fixed64 = 110; + optional sfixed32 no_default_sfixed32 = 111; + optional sfixed64 no_default_sfixed64 = 112; + optional bool no_default_bool = 113; + optional string no_default_string = 114; + optional bytes no_default_bytes = 115; + optional DefaultsMessage.DefaultsEnum no_default_enum = 116; + + optional double default_double = 201 [default = 3.1415]; + optional float default_float = 202 [default = 3.14]; + optional int32 default_int32 = 203 [default = 42]; + optional int64 default_int64 = 204 [default = 43]; + optional uint32 default_uint32 = 205 [default = 44]; + optional uint64 default_uint64 = 206 [default = 45]; + optional sint32 default_sint32 = 207 [default = 46]; + optional sint64 default_sint64 = 208 [default = 47]; + optional fixed32 default_fixed32 = 209 [default = 48]; + optional fixed64 default_fixed64 = 210 [default = 49]; + optional sfixed32 default_sfixed32 = 211 [default = 50]; + optional sfixed64 default_sfixed64 = 212 [default = 51]; + optional bool default_bool = 213 [default = true]; + optional string default_string = 214 [default = "Hello, string"]; + optional bytes default_bytes = 215 [default = "Hello, bytes"]; + optional DefaultsMessage.DefaultsEnum default_enum = 216 [default = ONE]; +} + +message MyMessageSet { + option message_set_wire_format = true; + extensions 100 to max; +} + +message Empty { +} + +extend MyMessageSet { + optional Empty x201 = 201; + optional Empty x202 = 202; + optional Empty x203 = 203; + optional Empty x204 = 204; + optional Empty x205 = 205; + optional Empty x206 = 206; + optional Empty x207 = 207; + optional Empty x208 = 208; + optional Empty x209 = 209; + optional Empty x210 = 210; + optional Empty x211 = 211; + optional Empty x212 = 212; + optional Empty x213 = 213; + optional Empty x214 = 214; + optional Empty x215 = 215; + optional Empty x216 = 216; + optional Empty x217 = 217; + optional Empty x218 = 218; + optional Empty x219 = 219; + optional Empty x220 = 220; + optional Empty x221 = 221; + optional Empty x222 = 222; + optional Empty x223 = 223; + optional Empty x224 = 224; + optional Empty x225 = 225; + optional Empty x226 = 226; + optional Empty x227 = 227; + optional Empty x228 = 228; + optional Empty x229 = 229; + optional Empty x230 = 230; + optional Empty x231 = 231; + optional Empty x232 = 232; + optional Empty x233 = 233; + optional Empty x234 = 234; + optional Empty x235 = 235; + optional Empty x236 = 236; + optional Empty x237 
= 237; + optional Empty x238 = 238; + optional Empty x239 = 239; + optional Empty x240 = 240; + optional Empty x241 = 241; + optional Empty x242 = 242; + optional Empty x243 = 243; + optional Empty x244 = 244; + optional Empty x245 = 245; + optional Empty x246 = 246; + optional Empty x247 = 247; + optional Empty x248 = 248; + optional Empty x249 = 249; + optional Empty x250 = 250; +} + +message MessageList { + repeated group Message = 1 { + required string name = 2; + required int32 count = 3; + } +} + +message Strings { + optional string string_field = 1; + optional bytes bytes_field = 2; +} + +message Defaults { + enum Color { + RED = 0; + GREEN = 1; + BLUE = 2; + } + + // Default-valued fields of all basic types. + // Same as GoTest, but copied here to make testing easier. + optional bool F_Bool = 1 [default=true]; + optional int32 F_Int32 = 2 [default=32]; + optional int64 F_Int64 = 3 [default=64]; + optional fixed32 F_Fixed32 = 4 [default=320]; + optional fixed64 F_Fixed64 = 5 [default=640]; + optional uint32 F_Uint32 = 6 [default=3200]; + optional uint64 F_Uint64 = 7 [default=6400]; + optional float F_Float = 8 [default=314159.]; + optional double F_Double = 9 [default=271828.]; + optional string F_String = 10 [default="hello, \"world!\"\n"]; + optional bytes F_Bytes = 11 [default="Bignose"]; + optional sint32 F_Sint32 = 12 [default=-32]; + optional sint64 F_Sint64 = 13 [default=-64]; + optional Color F_Enum = 14 [default=GREEN]; + + // More fields with crazy defaults. + optional float F_Pinf = 15 [default=inf]; + optional float F_Ninf = 16 [default=-inf]; + optional float F_Nan = 17 [default=nan]; + + // Sub-message. + optional SubDefaults sub = 18; + + // Redundant but explicit defaults. + optional string str_zero = 19 [default=""]; +} + +message SubDefaults { + optional int64 n = 1 [default=7]; +} + +message RepeatedEnum { + enum Color { + RED = 1; + } + repeated Color color = 1; +} + +message MoreRepeated { + repeated bool bools = 1; + repeated bool bools_packed = 2 [packed=true]; + repeated int32 ints = 3; + repeated int32 ints_packed = 4 [packed=true]; + repeated int64 int64s_packed = 7 [packed=true]; + repeated string strings = 5; + repeated fixed32 fixeds = 6; +} + +// GroupOld and GroupNew have the same wire format. +// GroupNew has a new field inside a group. + +message GroupOld { + optional group G = 101 { + optional int32 x = 2; + } +} + +message GroupNew { + optional group G = 101 { + optional int32 x = 2; + optional int32 y = 3; + } +} + +message FloatingPoint { + required double f = 1; + optional bool exact = 2; +} + +message MessageWithMap { + map name_mapping = 1; + map msg_mapping = 2; + map byte_mapping = 3; + map str_to_str = 4; +} + +message Oneof { + oneof union { + bool F_Bool = 1; + int32 F_Int32 = 2; + int64 F_Int64 = 3; + fixed32 F_Fixed32 = 4; + fixed64 F_Fixed64 = 5; + uint32 F_Uint32 = 6; + uint64 F_Uint64 = 7; + float F_Float = 8; + double F_Double = 9; + string F_String = 10; + bytes F_Bytes = 11; + sint32 F_Sint32 = 12; + sint64 F_Sint64 = 13; + MyMessage.Color F_Enum = 14; + GoTestField F_Message = 15; + group F_Group = 16 { + optional int32 x = 17; + } + int32 F_Largest_Tag = 536870911; + } + + oneof tormato { + int32 value = 100; + } +} + +message Communique { + optional bool make_me_cry = 1; + + // This is a oneof, called "union". 
+ oneof union { + int32 number = 5; + string name = 6; + bytes data = 7; + double temp_c = 8; + MyMessage.Color col = 9; + Strings msg = 10; + } +} diff --git a/vendor/github.com/golang/protobuf/proto/text.go b/vendor/github.com/golang/protobuf/proto/text.go new file mode 100644 index 000000000..965876bf0 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text.go @@ -0,0 +1,854 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +// Functions for writing the text protocol buffer format. + +import ( + "bufio" + "bytes" + "encoding" + "errors" + "fmt" + "io" + "log" + "math" + "reflect" + "sort" + "strings" +) + +var ( + newline = []byte("\n") + spaces = []byte(" ") + gtNewline = []byte(">\n") + endBraceNewline = []byte("}\n") + backslashN = []byte{'\\', 'n'} + backslashR = []byte{'\\', 'r'} + backslashT = []byte{'\\', 't'} + backslashDQ = []byte{'\\', '"'} + backslashBS = []byte{'\\', '\\'} + posInf = []byte("inf") + negInf = []byte("-inf") + nan = []byte("nan") +) + +type writer interface { + io.Writer + WriteByte(byte) error +} + +// textWriter is an io.Writer that tracks its indentation level. +type textWriter struct { + ind int + complete bool // if the current position is a complete line + compact bool // whether to write out as a one-liner + w writer +} + +func (w *textWriter) WriteString(s string) (n int, err error) { + if !strings.Contains(s, "\n") { + if !w.compact && w.complete { + w.writeIndent() + } + w.complete = false + return io.WriteString(w.w, s) + } + // WriteString is typically called without newlines, so this + // codepath and its copy are rare. We copy to avoid + // duplicating all of Write's logic here. 
+ return w.Write([]byte(s)) +} + +func (w *textWriter) Write(p []byte) (n int, err error) { + newlines := bytes.Count(p, newline) + if newlines == 0 { + if !w.compact && w.complete { + w.writeIndent() + } + n, err = w.w.Write(p) + w.complete = false + return n, err + } + + frags := bytes.SplitN(p, newline, newlines+1) + if w.compact { + for i, frag := range frags { + if i > 0 { + if err := w.w.WriteByte(' '); err != nil { + return n, err + } + n++ + } + nn, err := w.w.Write(frag) + n += nn + if err != nil { + return n, err + } + } + return n, nil + } + + for i, frag := range frags { + if w.complete { + w.writeIndent() + } + nn, err := w.w.Write(frag) + n += nn + if err != nil { + return n, err + } + if i+1 < len(frags) { + if err := w.w.WriteByte('\n'); err != nil { + return n, err + } + n++ + } + } + w.complete = len(frags[len(frags)-1]) == 0 + return n, nil +} + +func (w *textWriter) WriteByte(c byte) error { + if w.compact && c == '\n' { + c = ' ' + } + if !w.compact && w.complete { + w.writeIndent() + } + err := w.w.WriteByte(c) + w.complete = c == '\n' + return err +} + +func (w *textWriter) indent() { w.ind++ } + +func (w *textWriter) unindent() { + if w.ind == 0 { + log.Print("proto: textWriter unindented too far") + return + } + w.ind-- +} + +func writeName(w *textWriter, props *Properties) error { + if _, err := w.WriteString(props.OrigName); err != nil { + return err + } + if props.Wire != "group" { + return w.WriteByte(':') + } + return nil +} + +// raw is the interface satisfied by RawMessage. +type raw interface { + Bytes() []byte +} + +func requiresQuotes(u string) bool { + // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. + for _, ch := range u { + switch { + case ch == '.' || ch == '/' || ch == '_': + continue + case '0' <= ch && ch <= '9': + continue + case 'A' <= ch && ch <= 'Z': + continue + case 'a' <= ch && ch <= 'z': + continue + default: + return true + } + } + return false +} + +// isAny reports whether sv is a google.protobuf.Any message +func isAny(sv reflect.Value) bool { + type wkt interface { + XXX_WellKnownType() string + } + t, ok := sv.Addr().Interface().(wkt) + return ok && t.XXX_WellKnownType() == "Any" +} + +// writeProto3Any writes an expanded google.protobuf.Any message. +// +// It returns (false, nil) if sv value can't be unmarshaled (e.g. because +// required messages are not linked in). +// +// It returns (true, error) when sv was written in expanded format or an error +// was encountered. 
+func (tm *TextMarshaler) writeProto3Any(w *textWriter, sv reflect.Value) (bool, error) { + turl := sv.FieldByName("TypeUrl") + val := sv.FieldByName("Value") + if !turl.IsValid() || !val.IsValid() { + return true, errors.New("proto: invalid google.protobuf.Any message") + } + + b, ok := val.Interface().([]byte) + if !ok { + return true, errors.New("proto: invalid google.protobuf.Any message") + } + + parts := strings.Split(turl.String(), "/") + mt := MessageType(parts[len(parts)-1]) + if mt == nil { + return false, nil + } + m := reflect.New(mt.Elem()) + if err := Unmarshal(b, m.Interface().(Message)); err != nil { + return false, nil + } + w.Write([]byte("[")) + u := turl.String() + if requiresQuotes(u) { + writeString(w, u) + } else { + w.Write([]byte(u)) + } + if w.compact { + w.Write([]byte("]:<")) + } else { + w.Write([]byte("]: <\n")) + w.ind++ + } + if err := tm.writeStruct(w, m.Elem()); err != nil { + return true, err + } + if w.compact { + w.Write([]byte("> ")) + } else { + w.ind-- + w.Write([]byte(">\n")) + } + return true, nil +} + +func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { + if tm.ExpandAny && isAny(sv) { + if canExpand, err := tm.writeProto3Any(w, sv); canExpand { + return err + } + } + st := sv.Type() + sprops := GetProperties(st) + for i := 0; i < sv.NumField(); i++ { + fv := sv.Field(i) + props := sprops.Prop[i] + name := st.Field(i).Name + + if strings.HasPrefix(name, "XXX_") { + // There are two XXX_ fields: + // XXX_unrecognized []byte + // XXX_extensions map[int32]proto.Extension + // The first is handled here; + // the second is handled at the bottom of this function. + if name == "XXX_unrecognized" && !fv.IsNil() { + if err := writeUnknownStruct(w, fv.Interface().([]byte)); err != nil { + return err + } + } + continue + } + if fv.Kind() == reflect.Ptr && fv.IsNil() { + // Field not filled in. This could be an optional field or + // a required field that wasn't filled in. Either way, there + // isn't anything we can show for it. + continue + } + if fv.Kind() == reflect.Slice && fv.IsNil() { + // Repeated field that is empty, or a bytes field that is unused. + continue + } + + if props.Repeated && fv.Kind() == reflect.Slice { + // Repeated field. + for j := 0; j < fv.Len(); j++ { + if err := writeName(w, props); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte(' '); err != nil { + return err + } + } + v := fv.Index(j) + if v.Kind() == reflect.Ptr && v.IsNil() { + // A nil message in a repeated field is not valid, + // but we can handle that more gracefully than panicking. + if _, err := w.Write([]byte("\n")); err != nil { + return err + } + continue + } + if err := tm.writeAny(w, v, props); err != nil { + return err + } + if err := w.WriteByte('\n'); err != nil { + return err + } + } + continue + } + if fv.Kind() == reflect.Map { + // Map fields are rendered as a repeated struct with key/value fields. 
+ keys := fv.MapKeys() + sort.Sort(mapKeys(keys)) + for _, key := range keys { + val := fv.MapIndex(key) + if err := writeName(w, props); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte(' '); err != nil { + return err + } + } + // open struct + if err := w.WriteByte('<'); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte('\n'); err != nil { + return err + } + } + w.indent() + // key + if _, err := w.WriteString("key:"); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte(' '); err != nil { + return err + } + } + if err := tm.writeAny(w, key, props.mkeyprop); err != nil { + return err + } + if err := w.WriteByte('\n'); err != nil { + return err + } + // nil values aren't legal, but we can avoid panicking because of them. + if val.Kind() != reflect.Ptr || !val.IsNil() { + // value + if _, err := w.WriteString("value:"); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte(' '); err != nil { + return err + } + } + if err := tm.writeAny(w, val, props.mvalprop); err != nil { + return err + } + if err := w.WriteByte('\n'); err != nil { + return err + } + } + // close struct + w.unindent() + if err := w.WriteByte('>'); err != nil { + return err + } + if err := w.WriteByte('\n'); err != nil { + return err + } + } + continue + } + if props.proto3 && fv.Kind() == reflect.Slice && fv.Len() == 0 { + // empty bytes field + continue + } + if fv.Kind() != reflect.Ptr && fv.Kind() != reflect.Slice { + // proto3 non-repeated scalar field; skip if zero value + if isProto3Zero(fv) { + continue + } + } + + if fv.Kind() == reflect.Interface { + // Check if it is a oneof. + if st.Field(i).Tag.Get("protobuf_oneof") != "" { + // fv is nil, or holds a pointer to generated struct. + // That generated struct has exactly one field, + // which has a protobuf struct tag. + if fv.IsNil() { + continue + } + inner := fv.Elem().Elem() // interface -> *T -> T + tag := inner.Type().Field(0).Tag.Get("protobuf") + props = new(Properties) // Overwrite the outer props var, but not its pointee. + props.Parse(tag) + // Write the value in the oneof, not the oneof itself. + fv = inner.Field(0) + + // Special case to cope with malformed messages gracefully: + // If the value in the oneof is a nil pointer, don't panic + // in writeAny. + if fv.Kind() == reflect.Ptr && fv.IsNil() { + // Use errors.New so writeAny won't render quotes. + msg := errors.New("/* nil */") + fv = reflect.ValueOf(&msg).Elem() + } + } + } + + if err := writeName(w, props); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte(' '); err != nil { + return err + } + } + if b, ok := fv.Interface().(raw); ok { + if err := writeRaw(w, b.Bytes()); err != nil { + return err + } + continue + } + + // Enums have a String method, so writeAny will work fine. + if err := tm.writeAny(w, fv, props); err != nil { + return err + } + + if err := w.WriteByte('\n'); err != nil { + return err + } + } + + // Extensions (the XXX_extensions field). + pv := sv.Addr() + if _, ok := extendable(pv.Interface()); ok { + if err := tm.writeExtensions(w, pv); err != nil { + return err + } + } + + return nil +} + +// writeRaw writes an uninterpreted raw message. 
+func writeRaw(w *textWriter, b []byte) error { + if err := w.WriteByte('<'); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte('\n'); err != nil { + return err + } + } + w.indent() + if err := writeUnknownStruct(w, b); err != nil { + return err + } + w.unindent() + if err := w.WriteByte('>'); err != nil { + return err + } + return nil +} + +// writeAny writes an arbitrary field. +func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error { + v = reflect.Indirect(v) + + // Floats have special cases. + if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 { + x := v.Float() + var b []byte + switch { + case math.IsInf(x, 1): + b = posInf + case math.IsInf(x, -1): + b = negInf + case math.IsNaN(x): + b = nan + } + if b != nil { + _, err := w.Write(b) + return err + } + // Other values are handled below. + } + + // We don't attempt to serialise every possible value type; only those + // that can occur in protocol buffers. + switch v.Kind() { + case reflect.Slice: + // Should only be a []byte; repeated fields are handled in writeStruct. + if err := writeString(w, string(v.Bytes())); err != nil { + return err + } + case reflect.String: + if err := writeString(w, v.String()); err != nil { + return err + } + case reflect.Struct: + // Required/optional group/message. + var bra, ket byte = '<', '>' + if props != nil && props.Wire == "group" { + bra, ket = '{', '}' + } + if err := w.WriteByte(bra); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte('\n'); err != nil { + return err + } + } + w.indent() + if etm, ok := v.Interface().(encoding.TextMarshaler); ok { + text, err := etm.MarshalText() + if err != nil { + return err + } + if _, err = w.Write(text); err != nil { + return err + } + } else if err := tm.writeStruct(w, v); err != nil { + return err + } + w.unindent() + if err := w.WriteByte(ket); err != nil { + return err + } + default: + _, err := fmt.Fprint(w, v.Interface()) + return err + } + return nil +} + +// equivalent to C's isprint. +func isprint(c byte) bool { + return c >= 0x20 && c < 0x7f +} + +// writeString writes a string in the protocol buffer text format. +// It is similar to strconv.Quote except we don't use Go escape sequences, +// we treat the string as a byte sequence, and we use octal escapes. +// These differences are to maintain interoperability with the other +// languages' implementations of the text format. +func writeString(w *textWriter, s string) error { + // use WriteByte here to get any needed indent + if err := w.WriteByte('"'); err != nil { + return err + } + // Loop over the bytes, not the runes. + for i := 0; i < len(s); i++ { + var err error + // Divergence from C++: we don't escape apostrophes. + // There's no need to escape them, and the C++ parser + // copes with a naked apostrophe. 
+ switch c := s[i]; c { + case '\n': + _, err = w.w.Write(backslashN) + case '\r': + _, err = w.w.Write(backslashR) + case '\t': + _, err = w.w.Write(backslashT) + case '"': + _, err = w.w.Write(backslashDQ) + case '\\': + _, err = w.w.Write(backslashBS) + default: + if isprint(c) { + err = w.w.WriteByte(c) + } else { + _, err = fmt.Fprintf(w.w, "\\%03o", c) + } + } + if err != nil { + return err + } + } + return w.WriteByte('"') +} + +func writeUnknownStruct(w *textWriter, data []byte) (err error) { + if !w.compact { + if _, err := fmt.Fprintf(w, "/* %d unknown bytes */\n", len(data)); err != nil { + return err + } + } + b := NewBuffer(data) + for b.index < len(b.buf) { + x, err := b.DecodeVarint() + if err != nil { + _, err := fmt.Fprintf(w, "/* %v */\n", err) + return err + } + wire, tag := x&7, x>>3 + if wire == WireEndGroup { + w.unindent() + if _, err := w.Write(endBraceNewline); err != nil { + return err + } + continue + } + if _, err := fmt.Fprint(w, tag); err != nil { + return err + } + if wire != WireStartGroup { + if err := w.WriteByte(':'); err != nil { + return err + } + } + if !w.compact || wire == WireStartGroup { + if err := w.WriteByte(' '); err != nil { + return err + } + } + switch wire { + case WireBytes: + buf, e := b.DecodeRawBytes(false) + if e == nil { + _, err = fmt.Fprintf(w, "%q", buf) + } else { + _, err = fmt.Fprintf(w, "/* %v */", e) + } + case WireFixed32: + x, err = b.DecodeFixed32() + err = writeUnknownInt(w, x, err) + case WireFixed64: + x, err = b.DecodeFixed64() + err = writeUnknownInt(w, x, err) + case WireStartGroup: + err = w.WriteByte('{') + w.indent() + case WireVarint: + x, err = b.DecodeVarint() + err = writeUnknownInt(w, x, err) + default: + _, err = fmt.Fprintf(w, "/* unknown wire type %d */", wire) + } + if err != nil { + return err + } + if err = w.WriteByte('\n'); err != nil { + return err + } + } + return nil +} + +func writeUnknownInt(w *textWriter, x uint64, err error) error { + if err == nil { + _, err = fmt.Fprint(w, x) + } else { + _, err = fmt.Fprintf(w, "/* %v */", err) + } + return err +} + +type int32Slice []int32 + +func (s int32Slice) Len() int { return len(s) } +func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] } +func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +// writeExtensions writes all the extensions in pv. +// pv is assumed to be a pointer to a protocol message struct that is extendable. +func (tm *TextMarshaler) writeExtensions(w *textWriter, pv reflect.Value) error { + emap := extensionMaps[pv.Type().Elem()] + ep, _ := extendable(pv.Interface()) + + // Order the extensions by ID. + // This isn't strictly necessary, but it will give us + // canonical output, which will also make testing easier. + m, mu := ep.extensionsRead() + if m == nil { + return nil + } + mu.Lock() + ids := make([]int32, 0, len(m)) + for id := range m { + ids = append(ids, id) + } + sort.Sort(int32Slice(ids)) + mu.Unlock() + + for _, extNum := range ids { + ext := m[extNum] + var desc *ExtensionDesc + if emap != nil { + desc = emap[extNum] + } + if desc == nil { + // Unknown extension. + if err := writeUnknownStruct(w, ext.enc); err != nil { + return err + } + continue + } + + pb, err := GetExtension(ep, desc) + if err != nil { + return fmt.Errorf("failed getting extension: %v", err) + } + + // Repeated extensions will appear as a slice. 
+ if !desc.repeated() { + if err := tm.writeExtension(w, desc.Name, pb); err != nil { + return err + } + } else { + v := reflect.ValueOf(pb) + for i := 0; i < v.Len(); i++ { + if err := tm.writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil { + return err + } + } + } + } + return nil +} + +func (tm *TextMarshaler) writeExtension(w *textWriter, name string, pb interface{}) error { + if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil { + return err + } + if !w.compact { + if err := w.WriteByte(' '); err != nil { + return err + } + } + if err := tm.writeAny(w, reflect.ValueOf(pb), nil); err != nil { + return err + } + if err := w.WriteByte('\n'); err != nil { + return err + } + return nil +} + +func (w *textWriter) writeIndent() { + if !w.complete { + return + } + remain := w.ind * 2 + for remain > 0 { + n := remain + if n > len(spaces) { + n = len(spaces) + } + w.w.Write(spaces[:n]) + remain -= n + } + w.complete = false +} + +// TextMarshaler is a configurable text format marshaler. +type TextMarshaler struct { + Compact bool // use compact text format (one line). + ExpandAny bool // expand google.protobuf.Any messages of known types +} + +// Marshal writes a given protocol buffer in text format. +// The only errors returned are from w. +func (tm *TextMarshaler) Marshal(w io.Writer, pb Message) error { + val := reflect.ValueOf(pb) + if pb == nil || val.IsNil() { + w.Write([]byte("")) + return nil + } + var bw *bufio.Writer + ww, ok := w.(writer) + if !ok { + bw = bufio.NewWriter(w) + ww = bw + } + aw := &textWriter{ + w: ww, + complete: true, + compact: tm.Compact, + } + + if etm, ok := pb.(encoding.TextMarshaler); ok { + text, err := etm.MarshalText() + if err != nil { + return err + } + if _, err = aw.Write(text); err != nil { + return err + } + if bw != nil { + return bw.Flush() + } + return nil + } + // Dereference the received pointer so we don't have outer < and >. + v := reflect.Indirect(val) + if err := tm.writeStruct(aw, v); err != nil { + return err + } + if bw != nil { + return bw.Flush() + } + return nil +} + +// Text is the same as Marshal, but returns the string directly. +func (tm *TextMarshaler) Text(pb Message) string { + var buf bytes.Buffer + tm.Marshal(&buf, pb) + return buf.String() +} + +var ( + defaultTextMarshaler = TextMarshaler{} + compactTextMarshaler = TextMarshaler{Compact: true} +) + +// TODO: consider removing some of the Marshal functions below. + +// MarshalText writes a given protocol buffer in text format. +// The only errors returned are from w. +func MarshalText(w io.Writer, pb Message) error { return defaultTextMarshaler.Marshal(w, pb) } + +// MarshalTextString is the same as MarshalText, but returns the string directly. +func MarshalTextString(pb Message) string { return defaultTextMarshaler.Text(pb) } + +// CompactText writes a given protocol buffer in compact text format (one line). +func CompactText(w io.Writer, pb Message) error { return compactTextMarshaler.Marshal(w, pb) } + +// CompactTextString is the same as CompactText, but returns the string directly. +func CompactTextString(pb Message) string { return compactTextMarshaler.Text(pb) } diff --git a/vendor/github.com/golang/protobuf/proto/text_parser.go b/vendor/github.com/golang/protobuf/proto/text_parser.go new file mode 100644 index 000000000..5e14513f2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_parser.go @@ -0,0 +1,895 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto + +// Functions for parsing the Text protocol buffer format. +// TODO: message sets. + +import ( + "encoding" + "errors" + "fmt" + "reflect" + "strconv" + "strings" + "unicode/utf8" +) + +// Error string emitted when deserializing Any and fields are already set +const anyRepeatedlyUnpacked = "Any message unpacked multiple times, or %q already set" + +type ParseError struct { + Message string + Line int // 1-based line number + Offset int // 0-based byte offset from start of input +} + +func (p *ParseError) Error() string { + if p.Line == 1 { + // show offset only for first line + return fmt.Sprintf("line 1.%d: %v", p.Offset, p.Message) + } + return fmt.Sprintf("line %d: %v", p.Line, p.Message) +} + +type token struct { + value string + err *ParseError + line int // line number + offset int // byte number from start of input, not start of line + unquoted string // the unquoted version of value, if it was a quoted string +} + +func (t *token) String() string { + if t.err == nil { + return fmt.Sprintf("%q (line=%d, offset=%d)", t.value, t.line, t.offset) + } + return fmt.Sprintf("parse error: %v", t.err) +} + +type textParser struct { + s string // remaining input + done bool // whether the parsing is finished (success or error) + backed bool // whether back() was called + offset, line int + cur token +} + +func newTextParser(s string) *textParser { + p := new(textParser) + p.s = s + p.line = 1 + p.cur.line = 1 + return p +} + +func (p *textParser) errorf(format string, a ...interface{}) *ParseError { + pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset} + p.cur.err = pe + p.done = true + return pe +} + +// Numbers and identifiers are matched by [-+._A-Za-z0-9] +func isIdentOrNumberChar(c byte) bool { + switch { + case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z': + return true + case '0' <= c && c <= '9': + return true + } + switch c { + case '-', '+', '.', '_': + return true + } + return false +} + +func isWhitespace(c byte) bool { + 
switch c { + case ' ', '\t', '\n', '\r': + return true + } + return false +} + +func isQuote(c byte) bool { + switch c { + case '"', '\'': + return true + } + return false +} + +func (p *textParser) skipWhitespace() { + i := 0 + for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') { + if p.s[i] == '#' { + // comment; skip to end of line or input + for i < len(p.s) && p.s[i] != '\n' { + i++ + } + if i == len(p.s) { + break + } + } + if p.s[i] == '\n' { + p.line++ + } + i++ + } + p.offset += i + p.s = p.s[i:len(p.s)] + if len(p.s) == 0 { + p.done = true + } +} + +func (p *textParser) advance() { + // Skip whitespace + p.skipWhitespace() + if p.done { + return + } + + // Start of non-whitespace + p.cur.err = nil + p.cur.offset, p.cur.line = p.offset, p.line + p.cur.unquoted = "" + switch p.s[0] { + case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/': + // Single symbol + p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)] + case '"', '\'': + // Quoted string + i := 1 + for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' { + if p.s[i] == '\\' && i+1 < len(p.s) { + // skip escaped char + i++ + } + i++ + } + if i >= len(p.s) || p.s[i] != p.s[0] { + p.errorf("unmatched quote") + return + } + unq, err := unquoteC(p.s[1:i], rune(p.s[0])) + if err != nil { + p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err) + return + } + p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)] + p.cur.unquoted = unq + default: + i := 0 + for i < len(p.s) && isIdentOrNumberChar(p.s[i]) { + i++ + } + if i == 0 { + p.errorf("unexpected byte %#x", p.s[0]) + return + } + p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)] + } + p.offset += len(p.cur.value) +} + +var ( + errBadUTF8 = errors.New("proto: bad UTF-8") + errBadHex = errors.New("proto: bad hexadecimal") +) + +func unquoteC(s string, quote rune) (string, error) { + // This is based on C++'s tokenizer.cc. + // Despite its name, this is *not* parsing C syntax. + // For instance, "\0" is an invalid quoted string. + + // Avoid allocation in trivial cases. + simple := true + for _, r := range s { + if r == '\\' || r == quote { + simple = false + break + } + } + if simple { + return s, nil + } + + buf := make([]byte, 0, 3*len(s)/2) + for len(s) > 0 { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n == 1 { + return "", errBadUTF8 + } + s = s[n:] + if r != '\\' { + if r < utf8.RuneSelf { + buf = append(buf, byte(r)) + } else { + buf = append(buf, string(r)...) + } + continue + } + + ch, tail, err := unescape(s) + if err != nil { + return "", err + } + buf = append(buf, ch...) 
+ s = tail + } + return string(buf), nil +} + +func unescape(s string) (ch string, tail string, err error) { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n == 1 { + return "", "", errBadUTF8 + } + s = s[n:] + switch r { + case 'a': + return "\a", s, nil + case 'b': + return "\b", s, nil + case 'f': + return "\f", s, nil + case 'n': + return "\n", s, nil + case 'r': + return "\r", s, nil + case 't': + return "\t", s, nil + case 'v': + return "\v", s, nil + case '?': + return "?", s, nil // trigraph workaround + case '\'', '"', '\\': + return string(r), s, nil + case '0', '1', '2', '3', '4', '5', '6', '7', 'x', 'X': + if len(s) < 2 { + return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) + } + base := 8 + ss := s[:2] + s = s[2:] + if r == 'x' || r == 'X' { + base = 16 + } else { + ss = string(r) + ss + } + i, err := strconv.ParseUint(ss, base, 8) + if err != nil { + return "", "", err + } + return string([]byte{byte(i)}), s, nil + case 'u', 'U': + n := 4 + if r == 'U' { + n = 8 + } + if len(s) < n { + return "", "", fmt.Errorf(`\%c requires %d digits`, r, n) + } + + bs := make([]byte, n/2) + for i := 0; i < n; i += 2 { + a, ok1 := unhex(s[i]) + b, ok2 := unhex(s[i+1]) + if !ok1 || !ok2 { + return "", "", errBadHex + } + bs[i/2] = a<<4 | b + } + s = s[n:] + return string(bs), s, nil + } + return "", "", fmt.Errorf(`unknown escape \%c`, r) +} + +// Adapted from src/pkg/strconv/quote.go. +func unhex(b byte) (v byte, ok bool) { + switch { + case '0' <= b && b <= '9': + return b - '0', true + case 'a' <= b && b <= 'f': + return b - 'a' + 10, true + case 'A' <= b && b <= 'F': + return b - 'A' + 10, true + } + return 0, false +} + +// Back off the parser by one token. Can only be done between calls to next(). +// It makes the next advance() a no-op. +func (p *textParser) back() { p.backed = true } + +// Advances the parser and returns the new current token. +func (p *textParser) next() *token { + if p.backed || p.done { + p.backed = false + return &p.cur + } + p.advance() + if p.done { + p.cur.value = "" + } else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) { + // Look for multiple quoted strings separated by whitespace, + // and concatenate them. + cat := p.cur + for { + p.skipWhitespace() + if p.done || !isQuote(p.s[0]) { + break + } + p.advance() + if p.cur.err != nil { + return &p.cur + } + cat.value += " " + p.cur.value + cat.unquoted += p.cur.unquoted + } + p.done = false // parser may have seen EOF, but we want to return cat + p.cur = cat + } + return &p.cur +} + +func (p *textParser) consumeToken(s string) error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != s { + p.back() + return p.errorf("expected %q, found %q", s, tok.value) + } + return nil +} + +// Return a RequiredNotSetError indicating which required field was not set. +func (p *textParser) missingRequiredFieldError(sv reflect.Value) *RequiredNotSetError { + st := sv.Type() + sprops := GetProperties(st) + for i := 0; i < st.NumField(); i++ { + if !isNil(sv.Field(i)) { + continue + } + + props := sprops.Prop[i] + if props.Required { + return &RequiredNotSetError{fmt.Sprintf("%v.%v", st, props.OrigName)} + } + } + return &RequiredNotSetError{fmt.Sprintf("%v.", st)} // should not happen +} + +// Returns the index in the struct for the named field, as well as the parsed tag properties. 
+func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) { + i, ok := sprops.decoderOrigNames[name] + if ok { + return i, sprops.Prop[i], true + } + return -1, nil, false +} + +// Consume a ':' from the input stream (if the next token is a colon), +// returning an error if a colon is needed but not present. +func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != ":" { + // Colon is optional when the field is a group or message. + needColon := true + switch props.Wire { + case "group": + needColon = false + case "bytes": + // A "bytes" field is either a message, a string, or a repeated field; + // those three become *T, *string and []T respectively, so we can check for + // this field being a pointer to a non-string. + if typ.Kind() == reflect.Ptr { + // *T or *string + if typ.Elem().Kind() == reflect.String { + break + } + } else if typ.Kind() == reflect.Slice { + // []T or []*T + if typ.Elem().Kind() != reflect.Ptr { + break + } + } else if typ.Kind() == reflect.String { + // The proto3 exception is for a string field, + // which requires a colon. + break + } + needColon = false + } + if needColon { + return p.errorf("expected ':', found %q", tok.value) + } + p.back() + } + return nil +} + +func (p *textParser) readStruct(sv reflect.Value, terminator string) error { + st := sv.Type() + sprops := GetProperties(st) + reqCount := sprops.reqCount + var reqFieldErr error + fieldSet := make(map[string]bool) + // A struct is a sequence of "name: value", terminated by one of + // '>' or '}', or the end of the input. A name may also be + // "[extension]" or "[type/url]". + // + // The whole struct can also be an expanded Any message, like: + // [type/url] < ... struct contents ... > + for { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value == terminator { + break + } + if tok.value == "[" { + // Looks like an extension or an Any. + // + // TODO: Check whether we need to handle + // namespace rooted names (e.g. ".something.Foo"). + extName, err := p.consumeExtName() + if err != nil { + return err + } + + if s := strings.LastIndex(extName, "/"); s >= 0 { + // If it contains a slash, it's an Any type URL. + messageName := extName[s+1:] + mt := MessageType(messageName) + if mt == nil { + return p.errorf("unrecognized message %q in google.protobuf.Any", messageName) + } + tok = p.next() + if tok.err != nil { + return tok.err + } + // consume an optional colon + if tok.value == ":" { + tok = p.next() + if tok.err != nil { + return tok.err + } + } + var terminator string + switch tok.value { + case "<": + terminator = ">" + case "{": + terminator = "}" + default: + return p.errorf("expected '{' or '<', found %q", tok.value) + } + v := reflect.New(mt.Elem()) + if pe := p.readStruct(v.Elem(), terminator); pe != nil { + return pe + } + b, err := Marshal(v.Interface().(Message)) + if err != nil { + return p.errorf("failed to marshal message of type %q: %v", messageName, err) + } + if fieldSet["type_url"] { + return p.errorf(anyRepeatedlyUnpacked, "type_url") + } + if fieldSet["value"] { + return p.errorf(anyRepeatedlyUnpacked, "value") + } + sv.FieldByName("TypeUrl").SetString(extName) + sv.FieldByName("Value").SetBytes(b) + fieldSet["type_url"] = true + fieldSet["value"] = true + continue + } + + var desc *ExtensionDesc + // This could be faster, but it's functional. + // TODO: Do something smarter than a linear scan. 
+ for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) { + if d.Name == extName { + desc = d + break + } + } + if desc == nil { + return p.errorf("unrecognized extension %q", extName) + } + + props := &Properties{} + props.Parse(desc.Tag) + + typ := reflect.TypeOf(desc.ExtensionType) + if err := p.checkForColon(props, typ); err != nil { + return err + } + + rep := desc.repeated() + + // Read the extension structure, and set it in + // the value we're constructing. + var ext reflect.Value + if !rep { + ext = reflect.New(typ).Elem() + } else { + ext = reflect.New(typ.Elem()).Elem() + } + if err := p.readAny(ext, props); err != nil { + if _, ok := err.(*RequiredNotSetError); !ok { + return err + } + reqFieldErr = err + } + ep := sv.Addr().Interface().(Message) + if !rep { + SetExtension(ep, desc, ext.Interface()) + } else { + old, err := GetExtension(ep, desc) + var sl reflect.Value + if err == nil { + sl = reflect.ValueOf(old) // existing slice + } else { + sl = reflect.MakeSlice(typ, 0, 1) + } + sl = reflect.Append(sl, ext) + SetExtension(ep, desc, sl.Interface()) + } + if err := p.consumeOptionalSeparator(); err != nil { + return err + } + continue + } + + // This is a normal, non-extension field. + name := tok.value + var dst reflect.Value + fi, props, ok := structFieldByName(sprops, name) + if ok { + dst = sv.Field(fi) + } else if oop, ok := sprops.OneofTypes[name]; ok { + // It is a oneof. + props = oop.Prop + nv := reflect.New(oop.Type.Elem()) + dst = nv.Elem().Field(0) + field := sv.Field(oop.Field) + if !field.IsNil() { + return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name) + } + field.Set(nv) + } + if !dst.IsValid() { + return p.errorf("unknown field name %q in %v", name, st) + } + + if dst.Kind() == reflect.Map { + // Consume any colon. + if err := p.checkForColon(props, dst.Type()); err != nil { + return err + } + + // Construct the map if it doesn't already exist. + if dst.IsNil() { + dst.Set(reflect.MakeMap(dst.Type())) + } + key := reflect.New(dst.Type().Key()).Elem() + val := reflect.New(dst.Type().Elem()).Elem() + + // The map entry should be this sequence of tokens: + // < key : KEY value : VALUE > + // However, implementations may omit key or value, and technically + // we should support them in any order. See b/28924776 for a time + // this went wrong. + + tok := p.next() + var terminator string + switch tok.value { + case "<": + terminator = ">" + case "{": + terminator = "}" + default: + return p.errorf("expected '{' or '<', found %q", tok.value) + } + for { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value == terminator { + break + } + switch tok.value { + case "key": + if err := p.consumeToken(":"); err != nil { + return err + } + if err := p.readAny(key, props.mkeyprop); err != nil { + return err + } + if err := p.consumeOptionalSeparator(); err != nil { + return err + } + case "value": + if err := p.checkForColon(props.mvalprop, dst.Type().Elem()); err != nil { + return err + } + if err := p.readAny(val, props.mvalprop); err != nil { + return err + } + if err := p.consumeOptionalSeparator(); err != nil { + return err + } + default: + p.back() + return p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value) + } + } + + dst.SetMapIndex(key, val) + continue + } + + // Check that it's not already set if it's not a repeated field. 
+ if !props.Repeated && fieldSet[name] { + return p.errorf("non-repeated field %q was repeated", name) + } + + if err := p.checkForColon(props, dst.Type()); err != nil { + return err + } + + // Parse into the field. + fieldSet[name] = true + if err := p.readAny(dst, props); err != nil { + if _, ok := err.(*RequiredNotSetError); !ok { + return err + } + reqFieldErr = err + } + if props.Required { + reqCount-- + } + + if err := p.consumeOptionalSeparator(); err != nil { + return err + } + + } + + if reqCount > 0 { + return p.missingRequiredFieldError(sv) + } + return reqFieldErr +} + +// consumeExtName consumes extension name or expanded Any type URL and the +// following ']'. It returns the name or URL consumed. +func (p *textParser) consumeExtName() (string, error) { + tok := p.next() + if tok.err != nil { + return "", tok.err + } + + // If extension name or type url is quoted, it's a single token. + if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] { + name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0])) + if err != nil { + return "", err + } + return name, p.consumeToken("]") + } + + // Consume everything up to "]" + var parts []string + for tok.value != "]" { + parts = append(parts, tok.value) + tok = p.next() + if tok.err != nil { + return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) + } + } + return strings.Join(parts, ""), nil +} + +// consumeOptionalSeparator consumes an optional semicolon or comma. +// It is used in readStruct to provide backward compatibility. +func (p *textParser) consumeOptionalSeparator() error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != ";" && tok.value != "," { + p.back() + } + return nil +} + +func (p *textParser) readAny(v reflect.Value, props *Properties) error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value == "" { + return p.errorf("unexpected EOF") + } + + switch fv := v; fv.Kind() { + case reflect.Slice: + at := v.Type() + if at.Elem().Kind() == reflect.Uint8 { + // Special case for []byte + if tok.value[0] != '"' && tok.value[0] != '\'' { + // Deliberately written out here, as the error after + // this switch statement would write "invalid []byte: ...", + // which is not as user-friendly. + return p.errorf("invalid string: %v", tok.value) + } + bytes := []byte(tok.unquoted) + fv.Set(reflect.ValueOf(bytes)) + return nil + } + // Repeated field. + if tok.value == "[" { + // Repeated field with list notation, like [1,2,3]. + for { + fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) + err := p.readAny(fv.Index(fv.Len()-1), props) + if err != nil { + return err + } + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value == "]" { + break + } + if tok.value != "," { + return p.errorf("Expected ']' or ',' found %q", tok.value) + } + } + return nil + } + // One value of the repeated field. + p.back() + fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) + return p.readAny(fv.Index(fv.Len()-1), props) + case reflect.Bool: + // true/1/t/True or false/f/0/False. + switch tok.value { + case "true", "1", "t", "True": + fv.SetBool(true) + return nil + case "false", "0", "f", "False": + fv.SetBool(false) + return nil + } + case reflect.Float32, reflect.Float64: + v := tok.value + // Ignore 'f' for compatibility with output generated by C++, but don't + // remove 'f' when the value is "-inf" or "inf". 
+ if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" { + v = v[:len(v)-1] + } + if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil { + fv.SetFloat(f) + return nil + } + case reflect.Int32: + if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { + fv.SetInt(x) + return nil + } + + if len(props.Enum) == 0 { + break + } + m, ok := enumValueMaps[props.Enum] + if !ok { + break + } + x, ok := m[tok.value] + if !ok { + break + } + fv.SetInt(int64(x)) + return nil + case reflect.Int64: + if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil { + fv.SetInt(x) + return nil + } + + case reflect.Ptr: + // A basic field (indirected through pointer), or a repeated message/group + p.back() + fv.Set(reflect.New(fv.Type().Elem())) + return p.readAny(fv.Elem(), props) + case reflect.String: + if tok.value[0] == '"' || tok.value[0] == '\'' { + fv.SetString(tok.unquoted) + return nil + } + case reflect.Struct: + var terminator string + switch tok.value { + case "{": + terminator = "}" + case "<": + terminator = ">" + default: + return p.errorf("expected '{' or '<', found %q", tok.value) + } + // TODO: Handle nested messages which implement encoding.TextUnmarshaler. + return p.readStruct(fv, terminator) + case reflect.Uint32: + if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { + fv.SetUint(x) + return nil + } + case reflect.Uint64: + if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { + fv.SetUint(x) + return nil + } + } + return p.errorf("invalid %v: %v", v.Type(), tok.value) +} + +// UnmarshalText reads a protocol buffer in Text format. UnmarshalText resets pb +// before starting to unmarshal, so any existing data in pb is always removed. +// If a required field is not set and no other error occurs, +// UnmarshalText returns *RequiredNotSetError. +func UnmarshalText(s string, pb Message) error { + if um, ok := pb.(encoding.TextUnmarshaler); ok { + err := um.UnmarshalText([]byte(s)) + return err + } + pb.Reset() + v := reflect.ValueOf(pb) + if pe := newTextParser(s).readStruct(v.Elem(), ""); pe != nil { + return pe + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/proto/text_parser_test.go b/vendor/github.com/golang/protobuf/proto/text_parser_test.go new file mode 100644 index 000000000..8f7cb4d27 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_parser_test.go @@ -0,0 +1,673 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "math" + "reflect" + "testing" + + . "github.com/golang/protobuf/proto" + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + . "github.com/golang/protobuf/proto/testdata" +) + +type UnmarshalTextTest struct { + in string + err string // if "", no error expected + out *MyMessage +} + +func buildExtStructTest(text string) UnmarshalTextTest { + msg := &MyMessage{ + Count: Int32(42), + } + SetExtension(msg, E_Ext_More, &Ext{ + Data: String("Hello, world!"), + }) + return UnmarshalTextTest{in: text, out: msg} +} + +func buildExtDataTest(text string) UnmarshalTextTest { + msg := &MyMessage{ + Count: Int32(42), + } + SetExtension(msg, E_Ext_Text, String("Hello, world!")) + SetExtension(msg, E_Ext_Number, Int32(1729)) + return UnmarshalTextTest{in: text, out: msg} +} + +func buildExtRepStringTest(text string) UnmarshalTextTest { + msg := &MyMessage{ + Count: Int32(42), + } + if err := SetExtension(msg, E_Greeting, []string{"bula", "hola"}); err != nil { + panic(err) + } + return UnmarshalTextTest{in: text, out: msg} +} + +var unMarshalTextTests = []UnmarshalTextTest{ + // Basic + { + in: " count:42\n name:\"Dave\" ", + out: &MyMessage{ + Count: Int32(42), + Name: String("Dave"), + }, + }, + + // Empty quoted string + { + in: `count:42 name:""`, + out: &MyMessage{ + Count: Int32(42), + Name: String(""), + }, + }, + + // Quoted string concatenation with double quotes + { + in: `count:42 name: "My name is "` + "\n" + `"elsewhere"`, + out: &MyMessage{ + Count: Int32(42), + Name: String("My name is elsewhere"), + }, + }, + + // Quoted string concatenation with single quotes + { + in: "count:42 name: 'My name is '\n'elsewhere'", + out: &MyMessage{ + Count: Int32(42), + Name: String("My name is elsewhere"), + }, + }, + + // Quoted string concatenations with mixed quotes + { + in: "count:42 name: 'My name is '\n\"elsewhere\"", + out: &MyMessage{ + Count: Int32(42), + Name: String("My name is elsewhere"), + }, + }, + { + in: "count:42 name: \"My name is \"\n'elsewhere'", + out: &MyMessage{ + Count: Int32(42), + Name: String("My name is elsewhere"), + }, + }, + + // Quoted string with escaped apostrophe + { + in: `count:42 name: "HOLIDAY - New Year\'s Day"`, + out: &MyMessage{ + Count: Int32(42), + Name: String("HOLIDAY - New Year's Day"), + }, + }, + + // Quoted string with single quote + { + in: `count:42 name: 'Roger "The Ramster" Ramjet'`, + out: &MyMessage{ + Count: Int32(42), + Name: String(`Roger "The Ramster" Ramjet`), + }, + }, + + // Quoted string with all the accepted special characters from the C++ test + { + in: `count:42 name: ` + "\"\\\"A string with \\' characters \\n and \\r newlines and \\t tabs and \\001 slashes \\\\ and multiple spaces\"", + out: &MyMessage{ + Count: Int32(42), + Name: String("\"A string with ' characters \n and \r newlines and \t tabs and \001 slashes \\ and multiple spaces"), + }, + }, + + // Quoted string with quoted backslash + { + in: `count:42 name: "\\'xyz"`, + out: &MyMessage{ + Count: 
Int32(42), + Name: String(`\'xyz`), + }, + }, + + // Quoted string with UTF-8 bytes. + { + in: "count:42 name: '\303\277\302\201\xAB'", + out: &MyMessage{ + Count: Int32(42), + Name: String("\303\277\302\201\xAB"), + }, + }, + + // Bad quoted string + { + in: `inner: < host: "\0" >` + "\n", + err: `line 1.15: invalid quoted string "\0": \0 requires 2 following digits`, + }, + + // Number too large for int64 + { + in: "count: 1 others { key: 123456789012345678901 }", + err: "line 1.23: invalid int64: 123456789012345678901", + }, + + // Number too large for int32 + { + in: "count: 1234567890123", + err: "line 1.7: invalid int32: 1234567890123", + }, + + // Number in hexadecimal + { + in: "count: 0x2beef", + out: &MyMessage{ + Count: Int32(0x2beef), + }, + }, + + // Number in octal + { + in: "count: 024601", + out: &MyMessage{ + Count: Int32(024601), + }, + }, + + // Floating point number with "f" suffix + { + in: "count: 4 others:< weight: 17.0f >", + out: &MyMessage{ + Count: Int32(4), + Others: []*OtherMessage{ + { + Weight: Float32(17), + }, + }, + }, + }, + + // Floating point positive infinity + { + in: "count: 4 bigfloat: inf", + out: &MyMessage{ + Count: Int32(4), + Bigfloat: Float64(math.Inf(1)), + }, + }, + + // Floating point negative infinity + { + in: "count: 4 bigfloat: -inf", + out: &MyMessage{ + Count: Int32(4), + Bigfloat: Float64(math.Inf(-1)), + }, + }, + + // Number too large for float32 + { + in: "others:< weight: 12345678901234567890123456789012345678901234567890 >", + err: "line 1.17: invalid float32: 12345678901234567890123456789012345678901234567890", + }, + + // Number posing as a quoted string + { + in: `inner: < host: 12 >` + "\n", + err: `line 1.15: invalid string: 12`, + }, + + // Quoted string posing as int32 + { + in: `count: "12"`, + err: `line 1.7: invalid int32: "12"`, + }, + + // Quoted string posing a float32 + { + in: `others:< weight: "17.4" >`, + err: `line 1.17: invalid float32: "17.4"`, + }, + + // Enum + { + in: `count:42 bikeshed: BLUE`, + out: &MyMessage{ + Count: Int32(42), + Bikeshed: MyMessage_BLUE.Enum(), + }, + }, + + // Repeated field + { + in: `count:42 pet: "horsey" pet:"bunny"`, + out: &MyMessage{ + Count: Int32(42), + Pet: []string{"horsey", "bunny"}, + }, + }, + + // Repeated field with list notation + { + in: `count:42 pet: ["horsey", "bunny"]`, + out: &MyMessage{ + Count: Int32(42), + Pet: []string{"horsey", "bunny"}, + }, + }, + + // Repeated message with/without colon and <>/{} + { + in: `count:42 others:{} others{} others:<> others:{}`, + out: &MyMessage{ + Count: Int32(42), + Others: []*OtherMessage{ + {}, + {}, + {}, + {}, + }, + }, + }, + + // Missing colon for inner message + { + in: `count:42 inner < host: "cauchy.syd" >`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("cauchy.syd"), + }, + }, + }, + + // Missing colon for string field + { + in: `name "Dave"`, + err: `line 1.5: expected ':', found "\"Dave\""`, + }, + + // Missing colon for int32 field + { + in: `count 42`, + err: `line 1.6: expected ':', found "42"`, + }, + + // Missing required field + { + in: `name: "Pawel"`, + err: `proto: required field "testdata.MyMessage.count" not set`, + out: &MyMessage{ + Name: String("Pawel"), + }, + }, + + // Missing required field in a required submessage + { + in: `count: 42 we_must_go_deeper < leo_finally_won_an_oscar <> >`, + err: `proto: required field "testdata.InnerMessage.host" not set`, + out: &MyMessage{ + Count: Int32(42), + WeMustGoDeeper: &RequiredInnerMessage{LeoFinallyWonAnOscar: 
&InnerMessage{}}, + }, + }, + + // Repeated non-repeated field + { + in: `name: "Rob" name: "Russ"`, + err: `line 1.12: non-repeated field "name" was repeated`, + }, + + // Group + { + in: `count: 17 SomeGroup { group_field: 12 }`, + out: &MyMessage{ + Count: Int32(17), + Somegroup: &MyMessage_SomeGroup{ + GroupField: Int32(12), + }, + }, + }, + + // Semicolon between fields + { + in: `count:3;name:"Calvin"`, + out: &MyMessage{ + Count: Int32(3), + Name: String("Calvin"), + }, + }, + // Comma between fields + { + in: `count:4,name:"Ezekiel"`, + out: &MyMessage{ + Count: Int32(4), + Name: String("Ezekiel"), + }, + }, + + // Boolean false + { + in: `count:42 inner { host: "example.com" connected: false }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(false), + }, + }, + }, + // Boolean true + { + in: `count:42 inner { host: "example.com" connected: true }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(true), + }, + }, + }, + // Boolean 0 + { + in: `count:42 inner { host: "example.com" connected: 0 }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(false), + }, + }, + }, + // Boolean 1 + { + in: `count:42 inner { host: "example.com" connected: 1 }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(true), + }, + }, + }, + // Boolean f + { + in: `count:42 inner { host: "example.com" connected: f }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(false), + }, + }, + }, + // Boolean t + { + in: `count:42 inner { host: "example.com" connected: t }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(true), + }, + }, + }, + // Boolean False + { + in: `count:42 inner { host: "example.com" connected: False }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(false), + }, + }, + }, + // Boolean True + { + in: `count:42 inner { host: "example.com" connected: True }`, + out: &MyMessage{ + Count: Int32(42), + Inner: &InnerMessage{ + Host: String("example.com"), + Connected: Bool(true), + }, + }, + }, + + // Extension + buildExtStructTest(`count: 42 [testdata.Ext.more]:`), + buildExtStructTest(`count: 42 [testdata.Ext.more] {data:"Hello, world!"}`), + buildExtDataTest(`count: 42 [testdata.Ext.text]:"Hello, world!" 
[testdata.Ext.number]:1729`), + buildExtRepStringTest(`count: 42 [testdata.greeting]:"bula" [testdata.greeting]:"hola"`), + + // Big all-in-one + { + in: "count:42 # Meaning\n" + + `name:"Dave" ` + + `quote:"\"I didn't want to go.\"" ` + + `pet:"bunny" ` + + `pet:"kitty" ` + + `pet:"horsey" ` + + `inner:<` + + ` host:"footrest.syd" ` + + ` port:7001 ` + + ` connected:true ` + + `> ` + + `others:<` + + ` key:3735928559 ` + + ` value:"\x01A\a\f" ` + + `> ` + + `others:<` + + " weight:58.9 # Atomic weight of Co\n" + + ` inner:<` + + ` host:"lesha.mtv" ` + + ` port:8002 ` + + ` >` + + `>`, + out: &MyMessage{ + Count: Int32(42), + Name: String("Dave"), + Quote: String(`"I didn't want to go."`), + Pet: []string{"bunny", "kitty", "horsey"}, + Inner: &InnerMessage{ + Host: String("footrest.syd"), + Port: Int32(7001), + Connected: Bool(true), + }, + Others: []*OtherMessage{ + { + Key: Int64(3735928559), + Value: []byte{0x1, 'A', '\a', '\f'}, + }, + { + Weight: Float32(58.9), + Inner: &InnerMessage{ + Host: String("lesha.mtv"), + Port: Int32(8002), + }, + }, + }, + }, + }, +} + +func TestUnmarshalText(t *testing.T) { + for i, test := range unMarshalTextTests { + pb := new(MyMessage) + err := UnmarshalText(test.in, pb) + if test.err == "" { + // We don't expect failure. + if err != nil { + t.Errorf("Test %d: Unexpected error: %v", i, err) + } else if !reflect.DeepEqual(pb, test.out) { + t.Errorf("Test %d: Incorrect populated \nHave: %v\nWant: %v", + i, pb, test.out) + } + } else { + // We do expect failure. + if err == nil { + t.Errorf("Test %d: Didn't get expected error: %v", i, test.err) + } else if err.Error() != test.err { + t.Errorf("Test %d: Incorrect error.\nHave: %v\nWant: %v", + i, err.Error(), test.err) + } else if _, ok := err.(*RequiredNotSetError); ok && test.out != nil && !reflect.DeepEqual(pb, test.out) { + t.Errorf("Test %d: Incorrect populated \nHave: %v\nWant: %v", + i, pb, test.out) + } + } + } +} + +func TestUnmarshalTextCustomMessage(t *testing.T) { + msg := &textMessage{} + if err := UnmarshalText("custom", msg); err != nil { + t.Errorf("Unexpected error from custom unmarshal: %v", err) + } + if UnmarshalText("not custom", msg) == nil { + t.Errorf("Didn't get expected error from custom unmarshal") + } +} + +// Regression test; this caused a panic. 
+func TestRepeatedEnum(t *testing.T) { + pb := new(RepeatedEnum) + if err := UnmarshalText("color: RED", pb); err != nil { + t.Fatal(err) + } + exp := &RepeatedEnum{ + Color: []RepeatedEnum_Color{RepeatedEnum_RED}, + } + if !Equal(pb, exp) { + t.Errorf("Incorrect populated \nHave: %v\nWant: %v", pb, exp) + } +} + +func TestProto3TextParsing(t *testing.T) { + m := new(proto3pb.Message) + const in = `name: "Wallace" true_scotsman: true` + want := &proto3pb.Message{ + Name: "Wallace", + TrueScotsman: true, + } + if err := UnmarshalText(in, m); err != nil { + t.Fatal(err) + } + if !Equal(m, want) { + t.Errorf("\n got %v\nwant %v", m, want) + } +} + +func TestMapParsing(t *testing.T) { + m := new(MessageWithMap) + const in = `name_mapping: name_mapping:` + + `msg_mapping:,>` + // separating commas are okay + `msg_mapping>` + // no colon after "value" + `msg_mapping:>` + // omitted key + `msg_mapping:` + // omitted value + `byte_mapping:` + + `byte_mapping:<>` // omitted key and value + want := &MessageWithMap{ + NameMapping: map[int32]string{ + 1: "Beatles", + 1234: "Feist", + }, + MsgMapping: map[int64]*FloatingPoint{ + -4: {F: Float64(2.0)}, + -2: {F: Float64(4.0)}, + 0: {F: Float64(5.0)}, + 1: nil, + }, + ByteMapping: map[bool][]byte{ + false: nil, + true: []byte("so be it"), + }, + } + if err := UnmarshalText(in, m); err != nil { + t.Fatal(err) + } + if !Equal(m, want) { + t.Errorf("\n got %v\nwant %v", m, want) + } +} + +func TestOneofParsing(t *testing.T) { + const in = `name:"Shrek"` + m := new(Communique) + want := &Communique{Union: &Communique_Name{"Shrek"}} + if err := UnmarshalText(in, m); err != nil { + t.Fatal(err) + } + if !Equal(m, want) { + t.Errorf("\n got %v\nwant %v", m, want) + } + + const inOverwrite = `name:"Shrek" number:42` + m = new(Communique) + testErr := "line 1.13: field 'number' would overwrite already parsed oneof 'Union'" + if err := UnmarshalText(inOverwrite, m); err == nil { + t.Errorf("TestOneofParsing: Didn't get expected error: %v", testErr) + } else if err.Error() != testErr { + t.Errorf("TestOneofParsing: Incorrect error.\nHave: %v\nWant: %v", + err.Error(), testErr) + } + +} + +var benchInput string + +func init() { + benchInput = "count: 4\n" + for i := 0; i < 1000; i++ { + benchInput += "pet: \"fido\"\n" + } + + // Check it is valid input. + pb := new(MyMessage) + err := UnmarshalText(benchInput, pb) + if err != nil { + panic("Bad benchmark input: " + err.Error()) + } +} + +func BenchmarkUnmarshalText(b *testing.B) { + pb := new(MyMessage) + for i := 0; i < b.N; i++ { + UnmarshalText(benchInput, pb) + } + b.SetBytes(int64(len(benchInput))) +} diff --git a/vendor/github.com/golang/protobuf/proto/text_test.go b/vendor/github.com/golang/protobuf/proto/text_test.go new file mode 100644 index 000000000..3eabacac8 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_test.go @@ -0,0 +1,474 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package proto_test + +import ( + "bytes" + "errors" + "io/ioutil" + "math" + "strings" + "testing" + + "github.com/golang/protobuf/proto" + + proto3pb "github.com/golang/protobuf/proto/proto3_proto" + pb "github.com/golang/protobuf/proto/testdata" +) + +// textMessage implements the methods that allow it to marshal and unmarshal +// itself as text. +type textMessage struct { +} + +func (*textMessage) MarshalText() ([]byte, error) { + return []byte("custom"), nil +} + +func (*textMessage) UnmarshalText(bytes []byte) error { + if string(bytes) != "custom" { + return errors.New("expected 'custom'") + } + return nil +} + +func (*textMessage) Reset() {} +func (*textMessage) String() string { return "" } +func (*textMessage) ProtoMessage() {} + +func newTestMessage() *pb.MyMessage { + msg := &pb.MyMessage{ + Count: proto.Int32(42), + Name: proto.String("Dave"), + Quote: proto.String(`"I didn't want to go."`), + Pet: []string{"bunny", "kitty", "horsey"}, + Inner: &pb.InnerMessage{ + Host: proto.String("footrest.syd"), + Port: proto.Int32(7001), + Connected: proto.Bool(true), + }, + Others: []*pb.OtherMessage{ + { + Key: proto.Int64(0xdeadbeef), + Value: []byte{1, 65, 7, 12}, + }, + { + Weight: proto.Float32(6.022), + Inner: &pb.InnerMessage{ + Host: proto.String("lesha.mtv"), + Port: proto.Int32(8002), + }, + }, + }, + Bikeshed: pb.MyMessage_BLUE.Enum(), + Somegroup: &pb.MyMessage_SomeGroup{ + GroupField: proto.Int32(8), + }, + // One normally wouldn't do this. + // This is an undeclared tag 13, as a varint (wire type 0) with value 4. + XXX_unrecognized: []byte{13<<3 | 0, 4}, + } + ext := &pb.Ext{ + Data: proto.String("Big gobs for big rats"), + } + if err := proto.SetExtension(msg, pb.E_Ext_More, ext); err != nil { + panic(err) + } + greetings := []string{"adg", "easy", "cow"} + if err := proto.SetExtension(msg, pb.E_Greeting, greetings); err != nil { + panic(err) + } + + // Add an unknown extension. We marshal a pb.Ext, and fake the ID. + b, err := proto.Marshal(&pb.Ext{Data: proto.String("3G skiing")}) + if err != nil { + panic(err) + } + b = append(proto.EncodeVarint(201<<3|proto.WireBytes), b...) + proto.SetRawExtension(msg, 201, b) + + // Extensions can be plain fields, too, so let's test that. 
+ b = append(proto.EncodeVarint(202<<3|proto.WireVarint), 19) + proto.SetRawExtension(msg, 202, b) + + return msg +} + +const text = `count: 42 +name: "Dave" +quote: "\"I didn't want to go.\"" +pet: "bunny" +pet: "kitty" +pet: "horsey" +inner: < + host: "footrest.syd" + port: 7001 + connected: true +> +others: < + key: 3735928559 + value: "\001A\007\014" +> +others: < + weight: 6.022 + inner: < + host: "lesha.mtv" + port: 8002 + > +> +bikeshed: BLUE +SomeGroup { + group_field: 8 +} +/* 2 unknown bytes */ +13: 4 +[testdata.Ext.more]: < + data: "Big gobs for big rats" +> +[testdata.greeting]: "adg" +[testdata.greeting]: "easy" +[testdata.greeting]: "cow" +/* 13 unknown bytes */ +201: "\t3G skiing" +/* 3 unknown bytes */ +202: 19 +` + +func TestMarshalText(t *testing.T) { + buf := new(bytes.Buffer) + if err := proto.MarshalText(buf, newTestMessage()); err != nil { + t.Fatalf("proto.MarshalText: %v", err) + } + s := buf.String() + if s != text { + t.Errorf("Got:\n===\n%v===\nExpected:\n===\n%v===\n", s, text) + } +} + +func TestMarshalTextCustomMessage(t *testing.T) { + buf := new(bytes.Buffer) + if err := proto.MarshalText(buf, &textMessage{}); err != nil { + t.Fatalf("proto.MarshalText: %v", err) + } + s := buf.String() + if s != "custom" { + t.Errorf("Got %q, expected %q", s, "custom") + } +} +func TestMarshalTextNil(t *testing.T) { + want := "" + tests := []proto.Message{nil, (*pb.MyMessage)(nil)} + for i, test := range tests { + buf := new(bytes.Buffer) + if err := proto.MarshalText(buf, test); err != nil { + t.Fatal(err) + } + if got := buf.String(); got != want { + t.Errorf("%d: got %q want %q", i, got, want) + } + } +} + +func TestMarshalTextUnknownEnum(t *testing.T) { + // The Color enum only specifies values 0-2. + m := &pb.MyMessage{Bikeshed: pb.MyMessage_Color(3).Enum()} + got := m.String() + const want = `bikeshed:3 ` + if got != want { + t.Errorf("\n got %q\nwant %q", got, want) + } +} + +func TestTextOneof(t *testing.T) { + tests := []struct { + m proto.Message + want string + }{ + // zero message + {&pb.Communique{}, ``}, + // scalar field + {&pb.Communique{Union: &pb.Communique_Number{4}}, `number:4`}, + // message field + {&pb.Communique{Union: &pb.Communique_Msg{ + &pb.Strings{StringField: proto.String("why hello!")}, + }}, `msg:`}, + // bad oneof (should not panic) + {&pb.Communique{Union: &pb.Communique_Msg{nil}}, `msg:/* nil */`}, + } + for _, test := range tests { + got := strings.TrimSpace(test.m.String()) + if got != test.want { + t.Errorf("\n got %s\nwant %s", got, test.want) + } + } +} + +func BenchmarkMarshalTextBuffered(b *testing.B) { + buf := new(bytes.Buffer) + m := newTestMessage() + for i := 0; i < b.N; i++ { + buf.Reset() + proto.MarshalText(buf, m) + } +} + +func BenchmarkMarshalTextUnbuffered(b *testing.B) { + w := ioutil.Discard + m := newTestMessage() + for i := 0; i < b.N; i++ { + proto.MarshalText(w, m) + } +} + +func compact(src string) string { + // s/[ \n]+/ /g; s/ $//; + dst := make([]byte, len(src)) + space, comment := false, false + j := 0 + for i := 0; i < len(src); i++ { + if strings.HasPrefix(src[i:], "/*") { + comment = true + i++ + continue + } + if comment && strings.HasPrefix(src[i:], "*/") { + comment = false + i++ + continue + } + if comment { + continue + } + c := src[i] + if c == ' ' || c == '\n' { + space = true + continue + } + if j > 0 && (dst[j-1] == ':' || dst[j-1] == '<' || dst[j-1] == '{') { + space = false + } + if c == '{' { + space = false + } + if space { + dst[j] = ' ' + j++ + space = false + } + dst[j] = c + j++ + } + if 
space { + dst[j] = ' ' + j++ + } + return string(dst[0:j]) +} + +var compactText = compact(text) + +func TestCompactText(t *testing.T) { + s := proto.CompactTextString(newTestMessage()) + if s != compactText { + t.Errorf("Got:\n===\n%v===\nExpected:\n===\n%v\n===\n", s, compactText) + } +} + +func TestStringEscaping(t *testing.T) { + testCases := []struct { + in *pb.Strings + out string + }{ + { + // Test data from C++ test (TextFormatTest.StringEscape). + // Single divergence: we don't escape apostrophes. + &pb.Strings{StringField: proto.String("\"A string with ' characters \n and \r newlines and \t tabs and \001 slashes \\ and multiple spaces")}, + "string_field: \"\\\"A string with ' characters \\n and \\r newlines and \\t tabs and \\001 slashes \\\\ and multiple spaces\"\n", + }, + { + // Test data from the same C++ test. + &pb.Strings{StringField: proto.String("\350\260\267\346\255\214")}, + "string_field: \"\\350\\260\\267\\346\\255\\214\"\n", + }, + { + // Some UTF-8. + &pb.Strings{StringField: proto.String("\x00\x01\xff\x81")}, + `string_field: "\000\001\377\201"` + "\n", + }, + } + + for i, tc := range testCases { + var buf bytes.Buffer + if err := proto.MarshalText(&buf, tc.in); err != nil { + t.Errorf("proto.MarsalText: %v", err) + continue + } + s := buf.String() + if s != tc.out { + t.Errorf("#%d: Got:\n%s\nExpected:\n%s\n", i, s, tc.out) + continue + } + + // Check round-trip. + pb := new(pb.Strings) + if err := proto.UnmarshalText(s, pb); err != nil { + t.Errorf("#%d: UnmarshalText: %v", i, err) + continue + } + if !proto.Equal(pb, tc.in) { + t.Errorf("#%d: Round-trip failed:\nstart: %v\n end: %v", i, tc.in, pb) + } + } +} + +// A limitedWriter accepts some output before it fails. +// This is a proxy for something like a nearly-full or imminently-failing disk, +// or a network connection that is about to die. +type limitedWriter struct { + b bytes.Buffer + limit int +} + +var outOfSpace = errors.New("proto: insufficient space") + +func (w *limitedWriter) Write(p []byte) (n int, err error) { + var avail = w.limit - w.b.Len() + if avail <= 0 { + return 0, outOfSpace + } + if len(p) <= avail { + return w.b.Write(p) + } + n, _ = w.b.Write(p[:avail]) + return n, outOfSpace +} + +func TestMarshalTextFailing(t *testing.T) { + // Try lots of different sizes to exercise more error code-paths. + for lim := 0; lim < len(text); lim++ { + buf := new(limitedWriter) + buf.limit = lim + err := proto.MarshalText(buf, newTestMessage()) + // We expect a certain error, but also some partial results in the buffer. 
+ if err != outOfSpace { + t.Errorf("Got:\n===\n%v===\nExpected:\n===\n%v===\n", err, outOfSpace) + } + s := buf.b.String() + x := text[:buf.limit] + if s != x { + t.Errorf("Got:\n===\n%v===\nExpected:\n===\n%v===\n", s, x) + } + } +} + +func TestFloats(t *testing.T) { + tests := []struct { + f float64 + want string + }{ + {0, "0"}, + {4.7, "4.7"}, + {math.Inf(1), "inf"}, + {math.Inf(-1), "-inf"}, + {math.NaN(), "nan"}, + } + for _, test := range tests { + msg := &pb.FloatingPoint{F: &test.f} + got := strings.TrimSpace(msg.String()) + want := `f:` + test.want + if got != want { + t.Errorf("f=%f: got %q, want %q", test.f, got, want) + } + } +} + +func TestRepeatedNilText(t *testing.T) { + m := &pb.MessageList{ + Message: []*pb.MessageList_Message{ + nil, + &pb.MessageList_Message{ + Name: proto.String("Horse"), + }, + nil, + }, + } + want := `Message +Message { + name: "Horse" +} +Message +` + if s := proto.MarshalTextString(m); s != want { + t.Errorf(" got: %s\nwant: %s", s, want) + } +} + +func TestProto3Text(t *testing.T) { + tests := []struct { + m proto.Message + want string + }{ + // zero message + {&proto3pb.Message{}, ``}, + // zero message except for an empty byte slice + {&proto3pb.Message{Data: []byte{}}, ``}, + // trivial case + {&proto3pb.Message{Name: "Rob", HeightInCm: 175}, `name:"Rob" height_in_cm:175`}, + // empty map + {&pb.MessageWithMap{}, ``}, + // non-empty map; map format is the same as a repeated struct, + // and they are sorted by key (numerically for numeric keys). + { + &pb.MessageWithMap{NameMapping: map[int32]string{ + -1: "Negatory", + 7: "Lucky", + 1234: "Feist", + 6345789: "Otis", + }}, + `name_mapping: ` + + `name_mapping: ` + + `name_mapping: ` + + `name_mapping:`, + }, + // map with nil value; not well-defined, but we shouldn't crash + { + &pb.MessageWithMap{MsgMapping: map[int64]*pb.FloatingPoint{7: nil}}, + `msg_mapping:`, + }, + } + for _, test := range tests { + got := strings.TrimSpace(test.m.String()) + if got != test.want { + t.Errorf("\n got %s\nwant %s", got, test.want) + } + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile new file mode 100644 index 000000000..a42cc3717 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/Makefile @@ -0,0 +1,33 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +test: + cd testdata && make test diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile new file mode 100644 index 000000000..f706871a6 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/Makefile @@ -0,0 +1,37 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Not stored here, but descriptor.proto is in https://github.com/google/protobuf/ +# at src/google/protobuf/descriptor.proto +regenerate: + @echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION + cp $(HOME)/src/protobuf/include/google/protobuf/descriptor.proto . + protoc --go_out=../../../../.. -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/descriptor.proto diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go new file mode 100644 index 000000000..c6a91bcab --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go @@ -0,0 +1,2215 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/descriptor.proto + +/* +Package descriptor is a generated protocol buffer package. 
+
+It is generated from these files:
+	google/protobuf/descriptor.proto
+
+It has these top-level messages:
+	FileDescriptorSet
+	FileDescriptorProto
+	DescriptorProto
+	ExtensionRangeOptions
+	FieldDescriptorProto
+	OneofDescriptorProto
+	EnumDescriptorProto
+	EnumValueDescriptorProto
+	ServiceDescriptorProto
+	MethodDescriptorProto
+	FileOptions
+	MessageOptions
+	FieldOptions
+	OneofOptions
+	EnumOptions
+	EnumValueOptions
+	ServiceOptions
+	MethodOptions
+	UninterpretedOption
+	SourceCodeInfo
+	GeneratedCodeInfo
+*/
+package descriptor
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type FieldDescriptorProto_Type int32
+
+const (
+	// 0 is reserved for errors.
+	// Order is weird for historical reasons.
+	FieldDescriptorProto_TYPE_DOUBLE FieldDescriptorProto_Type = 1
+	FieldDescriptorProto_TYPE_FLOAT  FieldDescriptorProto_Type = 2
+	// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
+	// negative values are likely.
+	FieldDescriptorProto_TYPE_INT64  FieldDescriptorProto_Type = 3
+	FieldDescriptorProto_TYPE_UINT64 FieldDescriptorProto_Type = 4
+	// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
+	// negative values are likely.
+	FieldDescriptorProto_TYPE_INT32   FieldDescriptorProto_Type = 5
+	FieldDescriptorProto_TYPE_FIXED64 FieldDescriptorProto_Type = 6
+	FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7
+	FieldDescriptorProto_TYPE_BOOL    FieldDescriptorProto_Type = 8
+	FieldDescriptorProto_TYPE_STRING  FieldDescriptorProto_Type = 9
+	// Tag-delimited aggregate.
+	// Group type is deprecated and not supported in proto3. However, Proto3
+	// implementations should still be able to parse the group wire format and
+	// treat group fields as unknown fields.
+	FieldDescriptorProto_TYPE_GROUP   FieldDescriptorProto_Type = 10
+	FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11
+	// New in version 2.
+ FieldDescriptorProto_TYPE_BYTES FieldDescriptorProto_Type = 12 + FieldDescriptorProto_TYPE_UINT32 FieldDescriptorProto_Type = 13 + FieldDescriptorProto_TYPE_ENUM FieldDescriptorProto_Type = 14 + FieldDescriptorProto_TYPE_SFIXED32 FieldDescriptorProto_Type = 15 + FieldDescriptorProto_TYPE_SFIXED64 FieldDescriptorProto_Type = 16 + FieldDescriptorProto_TYPE_SINT32 FieldDescriptorProto_Type = 17 + FieldDescriptorProto_TYPE_SINT64 FieldDescriptorProto_Type = 18 +) + +var FieldDescriptorProto_Type_name = map[int32]string{ + 1: "TYPE_DOUBLE", + 2: "TYPE_FLOAT", + 3: "TYPE_INT64", + 4: "TYPE_UINT64", + 5: "TYPE_INT32", + 6: "TYPE_FIXED64", + 7: "TYPE_FIXED32", + 8: "TYPE_BOOL", + 9: "TYPE_STRING", + 10: "TYPE_GROUP", + 11: "TYPE_MESSAGE", + 12: "TYPE_BYTES", + 13: "TYPE_UINT32", + 14: "TYPE_ENUM", + 15: "TYPE_SFIXED32", + 16: "TYPE_SFIXED64", + 17: "TYPE_SINT32", + 18: "TYPE_SINT64", +} +var FieldDescriptorProto_Type_value = map[string]int32{ + "TYPE_DOUBLE": 1, + "TYPE_FLOAT": 2, + "TYPE_INT64": 3, + "TYPE_UINT64": 4, + "TYPE_INT32": 5, + "TYPE_FIXED64": 6, + "TYPE_FIXED32": 7, + "TYPE_BOOL": 8, + "TYPE_STRING": 9, + "TYPE_GROUP": 10, + "TYPE_MESSAGE": 11, + "TYPE_BYTES": 12, + "TYPE_UINT32": 13, + "TYPE_ENUM": 14, + "TYPE_SFIXED32": 15, + "TYPE_SFIXED64": 16, + "TYPE_SINT32": 17, + "TYPE_SINT64": 18, +} + +func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type { + p := new(FieldDescriptorProto_Type) + *p = x + return p +} +func (x FieldDescriptorProto_Type) String() string { + return proto.EnumName(FieldDescriptorProto_Type_name, int32(x)) +} +func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Type_value, data, "FieldDescriptorProto_Type") + if err != nil { + return err + } + *x = FieldDescriptorProto_Type(value) + return nil +} +func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} } + +type FieldDescriptorProto_Label int32 + +const ( + // 0 is reserved for errors + FieldDescriptorProto_LABEL_OPTIONAL FieldDescriptorProto_Label = 1 + FieldDescriptorProto_LABEL_REQUIRED FieldDescriptorProto_Label = 2 + FieldDescriptorProto_LABEL_REPEATED FieldDescriptorProto_Label = 3 +) + +var FieldDescriptorProto_Label_name = map[int32]string{ + 1: "LABEL_OPTIONAL", + 2: "LABEL_REQUIRED", + 3: "LABEL_REPEATED", +} +var FieldDescriptorProto_Label_value = map[string]int32{ + "LABEL_OPTIONAL": 1, + "LABEL_REQUIRED": 2, + "LABEL_REPEATED": 3, +} + +func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label { + p := new(FieldDescriptorProto_Label) + *p = x + return p +} +func (x FieldDescriptorProto_Label) String() string { + return proto.EnumName(FieldDescriptorProto_Label_name, int32(x)) +} +func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Label_value, data, "FieldDescriptorProto_Label") + if err != nil { + return err + } + *x = FieldDescriptorProto_Label(value) + return nil +} +func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{4, 1} +} + +// Generated classes can be optimized for speed or code size. +type FileOptions_OptimizeMode int32 + +const ( + FileOptions_SPEED FileOptions_OptimizeMode = 1 + // etc. 
+ FileOptions_CODE_SIZE FileOptions_OptimizeMode = 2 + FileOptions_LITE_RUNTIME FileOptions_OptimizeMode = 3 +) + +var FileOptions_OptimizeMode_name = map[int32]string{ + 1: "SPEED", + 2: "CODE_SIZE", + 3: "LITE_RUNTIME", +} +var FileOptions_OptimizeMode_value = map[string]int32{ + "SPEED": 1, + "CODE_SIZE": 2, + "LITE_RUNTIME": 3, +} + +func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode { + p := new(FileOptions_OptimizeMode) + *p = x + return p +} +func (x FileOptions_OptimizeMode) String() string { + return proto.EnumName(FileOptions_OptimizeMode_name, int32(x)) +} +func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FileOptions_OptimizeMode_value, data, "FileOptions_OptimizeMode") + if err != nil { + return err + } + *x = FileOptions_OptimizeMode(value) + return nil +} +func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{10, 0} } + +type FieldOptions_CType int32 + +const ( + // Default mode. + FieldOptions_STRING FieldOptions_CType = 0 + FieldOptions_CORD FieldOptions_CType = 1 + FieldOptions_STRING_PIECE FieldOptions_CType = 2 +) + +var FieldOptions_CType_name = map[int32]string{ + 0: "STRING", + 1: "CORD", + 2: "STRING_PIECE", +} +var FieldOptions_CType_value = map[string]int32{ + "STRING": 0, + "CORD": 1, + "STRING_PIECE": 2, +} + +func (x FieldOptions_CType) Enum() *FieldOptions_CType { + p := new(FieldOptions_CType) + *p = x + return p +} +func (x FieldOptions_CType) String() string { + return proto.EnumName(FieldOptions_CType_name, int32(x)) +} +func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldOptions_CType_value, data, "FieldOptions_CType") + if err != nil { + return err + } + *x = FieldOptions_CType(value) + return nil +} +func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{12, 0} } + +type FieldOptions_JSType int32 + +const ( + // Use the default type. + FieldOptions_JS_NORMAL FieldOptions_JSType = 0 + // Use JavaScript strings. + FieldOptions_JS_STRING FieldOptions_JSType = 1 + // Use JavaScript numbers. + FieldOptions_JS_NUMBER FieldOptions_JSType = 2 +) + +var FieldOptions_JSType_name = map[int32]string{ + 0: "JS_NORMAL", + 1: "JS_STRING", + 2: "JS_NUMBER", +} +var FieldOptions_JSType_value = map[string]int32{ + "JS_NORMAL": 0, + "JS_STRING": 1, + "JS_NUMBER": 2, +} + +func (x FieldOptions_JSType) Enum() *FieldOptions_JSType { + p := new(FieldOptions_JSType) + *p = x + return p +} +func (x FieldOptions_JSType) String() string { + return proto.EnumName(FieldOptions_JSType_name, int32(x)) +} +func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldOptions_JSType_value, data, "FieldOptions_JSType") + if err != nil { + return err + } + *x = FieldOptions_JSType(value) + return nil +} +func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{12, 1} } + +// Is this method side-effect-free (or safe in HTTP parlance), or idempotent, +// or neither? HTTP based RPC implementation may choose GET verb for safe +// methods, and PUT verb for idempotent methods instead of the default POST. 
+type MethodOptions_IdempotencyLevel int32
+
+const (
+	MethodOptions_IDEMPOTENCY_UNKNOWN MethodOptions_IdempotencyLevel = 0
+	MethodOptions_NO_SIDE_EFFECTS     MethodOptions_IdempotencyLevel = 1
+	MethodOptions_IDEMPOTENT          MethodOptions_IdempotencyLevel = 2
+)
+
+var MethodOptions_IdempotencyLevel_name = map[int32]string{
+	0: "IDEMPOTENCY_UNKNOWN",
+	1: "NO_SIDE_EFFECTS",
+	2: "IDEMPOTENT",
+}
+var MethodOptions_IdempotencyLevel_value = map[string]int32{
+	"IDEMPOTENCY_UNKNOWN": 0,
+	"NO_SIDE_EFFECTS":     1,
+	"IDEMPOTENT":          2,
+}
+
+func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel {
+	p := new(MethodOptions_IdempotencyLevel)
+	*p = x
+	return p
+}
+func (x MethodOptions_IdempotencyLevel) String() string {
+	return proto.EnumName(MethodOptions_IdempotencyLevel_name, int32(x))
+}
+func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MethodOptions_IdempotencyLevel_value, data, "MethodOptions_IdempotencyLevel")
+	if err != nil {
+		return err
+	}
+	*x = MethodOptions_IdempotencyLevel(value)
+	return nil
+}
+func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{17, 0}
+}
+
+// The protocol compiler can output a FileDescriptorSet containing the .proto
+// files it parses.
+type FileDescriptorSet struct {
+	File             []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"`
+	XXX_unrecognized []byte                 `json:"-"`
+}
+
+func (m *FileDescriptorSet) Reset()                    { *m = FileDescriptorSet{} }
+func (m *FileDescriptorSet) String() string            { return proto.CompactTextString(m) }
+func (*FileDescriptorSet) ProtoMessage()               {}
+func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto {
+	if m != nil {
+		return m.File
+	}
+	return nil
+}
+
+// Describes a complete .proto file.
+type FileDescriptorProto struct {
+	Name    *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	Package *string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"`
+	// Names of files imported by this file.
+	Dependency []string `protobuf:"bytes,3,rep,name=dependency" json:"dependency,omitempty"`
+	// Indexes of the public imported files in the dependency list above.
+	PublicDependency []int32 `protobuf:"varint,10,rep,name=public_dependency,json=publicDependency" json:"public_dependency,omitempty"`
+	// Indexes of the weak imported files in the dependency list.
+	// For Google-internal migration only. Do not use.
+	WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"`
+	// All top-level definitions in this file.
+	MessageType []*DescriptorProto        `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"`
+	EnumType    []*EnumDescriptorProto    `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"`
+	Service     []*ServiceDescriptorProto `protobuf:"bytes,6,rep,name=service" json:"service,omitempty"`
+	Extension   []*FieldDescriptorProto   `protobuf:"bytes,7,rep,name=extension" json:"extension,omitempty"`
+	Options     *FileOptions              `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"`
+	// This field contains optional information about the original source code.
+	// You may safely remove this entire field without harming runtime
+	// functionality of the descriptors -- the information is needed only by
+	// development tools.
+ SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } +func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorProto) ProtoMessage() {} +func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *FileDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *FileDescriptorProto) GetPackage() string { + if m != nil && m.Package != nil { + return *m.Package + } + return "" +} + +func (m *FileDescriptorProto) GetDependency() []string { + if m != nil { + return m.Dependency + } + return nil +} + +func (m *FileDescriptorProto) GetPublicDependency() []int32 { + if m != nil { + return m.PublicDependency + } + return nil +} + +func (m *FileDescriptorProto) GetWeakDependency() []int32 { + if m != nil { + return m.WeakDependency + } + return nil +} + +func (m *FileDescriptorProto) GetMessageType() []*DescriptorProto { + if m != nil { + return m.MessageType + } + return nil +} + +func (m *FileDescriptorProto) GetEnumType() []*EnumDescriptorProto { + if m != nil { + return m.EnumType + } + return nil +} + +func (m *FileDescriptorProto) GetService() []*ServiceDescriptorProto { + if m != nil { + return m.Service + } + return nil +} + +func (m *FileDescriptorProto) GetExtension() []*FieldDescriptorProto { + if m != nil { + return m.Extension + } + return nil +} + +func (m *FileDescriptorProto) GetOptions() *FileOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *FileDescriptorProto) GetSourceCodeInfo() *SourceCodeInfo { + if m != nil { + return m.SourceCodeInfo + } + return nil +} + +func (m *FileDescriptorProto) GetSyntax() string { + if m != nil && m.Syntax != nil { + return *m.Syntax + } + return "" +} + +// Describes a message type. +type DescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Field []*FieldDescriptorProto `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"` + Extension []*FieldDescriptorProto `protobuf:"bytes,6,rep,name=extension" json:"extension,omitempty"` + NestedType []*DescriptorProto `protobuf:"bytes,3,rep,name=nested_type,json=nestedType" json:"nested_type,omitempty"` + EnumType []*EnumDescriptorProto `protobuf:"bytes,4,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` + ExtensionRange []*DescriptorProto_ExtensionRange `protobuf:"bytes,5,rep,name=extension_range,json=extensionRange" json:"extension_range,omitempty"` + OneofDecl []*OneofDescriptorProto `protobuf:"bytes,8,rep,name=oneof_decl,json=oneofDecl" json:"oneof_decl,omitempty"` + Options *MessageOptions `protobuf:"bytes,7,opt,name=options" json:"options,omitempty"` + ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. 
+ ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } +func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto) ProtoMessage() {} +func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *DescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *DescriptorProto) GetField() []*FieldDescriptorProto { + if m != nil { + return m.Field + } + return nil +} + +func (m *DescriptorProto) GetExtension() []*FieldDescriptorProto { + if m != nil { + return m.Extension + } + return nil +} + +func (m *DescriptorProto) GetNestedType() []*DescriptorProto { + if m != nil { + return m.NestedType + } + return nil +} + +func (m *DescriptorProto) GetEnumType() []*EnumDescriptorProto { + if m != nil { + return m.EnumType + } + return nil +} + +func (m *DescriptorProto) GetExtensionRange() []*DescriptorProto_ExtensionRange { + if m != nil { + return m.ExtensionRange + } + return nil +} + +func (m *DescriptorProto) GetOneofDecl() []*OneofDescriptorProto { + if m != nil { + return m.OneofDecl + } + return nil +} + +func (m *DescriptorProto) GetOptions() *MessageOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *DescriptorProto) GetReservedRange() []*DescriptorProto_ReservedRange { + if m != nil { + return m.ReservedRange + } + return nil +} + +func (m *DescriptorProto) GetReservedName() []string { + if m != nil { + return m.ReservedName + } + return nil +} + +type DescriptorProto_ExtensionRange struct { + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + Options *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DescriptorProto_ExtensionRange) Reset() { *m = DescriptorProto_ExtensionRange{} } +func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto_ExtensionRange) ProtoMessage() {} +func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{2, 0} +} + +func (m *DescriptorProto_ExtensionRange) GetStart() int32 { + if m != nil && m.Start != nil { + return *m.Start + } + return 0 +} + +func (m *DescriptorProto_ExtensionRange) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +func (m *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions { + if m != nil { + return m.Options + } + return nil +} + +// Range of reserved tag numbers. Reserved tag numbers may not be used by +// fields or extension ranges in the same message. Reserved ranges may +// not overlap. 
+type DescriptorProto_ReservedRange struct { + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DescriptorProto_ReservedRange) Reset() { *m = DescriptorProto_ReservedRange{} } +func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto_ReservedRange) ProtoMessage() {} +func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{2, 1} +} + +func (m *DescriptorProto_ReservedRange) GetStart() int32 { + if m != nil && m.Start != nil { + return *m.Start + } + return 0 +} + +func (m *DescriptorProto_ReservedRange) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +type ExtensionRangeOptions struct { + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtensionRangeOptions) Reset() { *m = ExtensionRangeOptions{} } +func (m *ExtensionRangeOptions) String() string { return proto.CompactTextString(m) } +func (*ExtensionRangeOptions) ProtoMessage() {} +func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +var extRange_ExtensionRangeOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*ExtensionRangeOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ExtensionRangeOptions +} + +func (m *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +// Describes a field within a message. +type FieldDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,3,opt,name=number" json:"number,omitempty"` + Label *FieldDescriptorProto_Label `protobuf:"varint,4,opt,name=label,enum=google.protobuf.FieldDescriptorProto_Label" json:"label,omitempty"` + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + Type *FieldDescriptorProto_Type `protobuf:"varint,5,opt,name=type,enum=google.protobuf.FieldDescriptorProto_Type" json:"type,omitempty"` + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + TypeName *string `protobuf:"bytes,6,opt,name=type_name,json=typeName" json:"type_name,omitempty"` + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + Extendee *string `protobuf:"bytes,2,opt,name=extendee" json:"extendee,omitempty"` + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? 
+ DefaultValue *string `protobuf:"bytes,7,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"` + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + OneofIndex *int32 `protobuf:"varint,9,opt,name=oneof_index,json=oneofIndex" json:"oneof_index,omitempty"` + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` + Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } +func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FieldDescriptorProto) ProtoMessage() {} +func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *FieldDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *FieldDescriptorProto) GetNumber() int32 { + if m != nil && m.Number != nil { + return *m.Number + } + return 0 +} + +func (m *FieldDescriptorProto) GetLabel() FieldDescriptorProto_Label { + if m != nil && m.Label != nil { + return *m.Label + } + return FieldDescriptorProto_LABEL_OPTIONAL +} + +func (m *FieldDescriptorProto) GetType() FieldDescriptorProto_Type { + if m != nil && m.Type != nil { + return *m.Type + } + return FieldDescriptorProto_TYPE_DOUBLE +} + +func (m *FieldDescriptorProto) GetTypeName() string { + if m != nil && m.TypeName != nil { + return *m.TypeName + } + return "" +} + +func (m *FieldDescriptorProto) GetExtendee() string { + if m != nil && m.Extendee != nil { + return *m.Extendee + } + return "" +} + +func (m *FieldDescriptorProto) GetDefaultValue() string { + if m != nil && m.DefaultValue != nil { + return *m.DefaultValue + } + return "" +} + +func (m *FieldDescriptorProto) GetOneofIndex() int32 { + if m != nil && m.OneofIndex != nil { + return *m.OneofIndex + } + return 0 +} + +func (m *FieldDescriptorProto) GetJsonName() string { + if m != nil && m.JsonName != nil { + return *m.JsonName + } + return "" +} + +func (m *FieldDescriptorProto) GetOptions() *FieldOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a oneof. +type OneofDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } +func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*OneofDescriptorProto) ProtoMessage() {} +func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *OneofDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *OneofDescriptorProto) GetOptions() *OneofOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes an enum type. 
+type EnumDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` + Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } +func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumDescriptorProto) ProtoMessage() {} +func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *EnumDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *EnumDescriptorProto) GetValue() []*EnumValueDescriptorProto { + if m != nil { + return m.Value + } + return nil +} + +func (m *EnumDescriptorProto) GetOptions() *EnumOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a value within an enum. +type EnumValueDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` + Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } +func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumValueDescriptorProto) ProtoMessage() {} +func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *EnumValueDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *EnumValueDescriptorProto) GetNumber() int32 { + if m != nil && m.Number != nil { + return *m.Number + } + return 0 +} + +func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a service. +type ServiceDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` + Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } +func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*ServiceDescriptorProto) ProtoMessage() {} +func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *ServiceDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *ServiceDescriptorProto) GetMethod() []*MethodDescriptorProto { + if m != nil { + return m.Method + } + return nil +} + +func (m *ServiceDescriptorProto) GetOptions() *ServiceOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a method of a service. +type MethodDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. 
+ InputType *string `protobuf:"bytes,2,opt,name=input_type,json=inputType" json:"input_type,omitempty"` + OutputType *string `protobuf:"bytes,3,opt,name=output_type,json=outputType" json:"output_type,omitempty"` + Options *MethodOptions `protobuf:"bytes,4,opt,name=options" json:"options,omitempty"` + // Identifies if client streams multiple client messages + ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` + // Identifies if server streams multiple server messages + ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } +func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*MethodDescriptorProto) ProtoMessage() {} +func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +const Default_MethodDescriptorProto_ClientStreaming bool = false +const Default_MethodDescriptorProto_ServerStreaming bool = false + +func (m *MethodDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MethodDescriptorProto) GetInputType() string { + if m != nil && m.InputType != nil { + return *m.InputType + } + return "" +} + +func (m *MethodDescriptorProto) GetOutputType() string { + if m != nil && m.OutputType != nil { + return *m.OutputType + } + return "" +} + +func (m *MethodDescriptorProto) GetOptions() *MethodOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *MethodDescriptorProto) GetClientStreaming() bool { + if m != nil && m.ClientStreaming != nil { + return *m.ClientStreaming + } + return Default_MethodDescriptorProto_ClientStreaming +} + +func (m *MethodDescriptorProto) GetServerStreaming() bool { + if m != nil && m.ServerStreaming != nil { + return *m.ServerStreaming + } + return Default_MethodDescriptorProto_ServerStreaming +} + +type FileOptions struct { + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + JavaPackage *string `protobuf:"bytes,1,opt,name=java_package,json=javaPackage" json:"java_package,omitempty"` + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + JavaOuterClassname *string `protobuf:"bytes,8,opt,name=java_outer_classname,json=javaOuterClassname" json:"java_outer_classname,omitempty"` + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` + // This option does nothing. 
+ JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + JavaStringCheckUtf8 *bool `protobuf:"varint,27,opt,name=java_string_check_utf8,json=javaStringCheckUtf8,def=0" json:"java_string_check_utf8,omitempty"` + OptimizeFor *FileOptions_OptimizeMode `protobuf:"varint,9,opt,name=optimize_for,json=optimizeFor,enum=google.protobuf.FileOptions_OptimizeMode,def=1" json:"optimize_for,omitempty"` + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + GoPackage *string `protobuf:"bytes,11,opt,name=go_package,json=goPackage" json:"go_package,omitempty"` + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` + JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` + PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` + PhpGenericServices *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"` + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + Deprecated *bool `protobuf:"varint,23,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + CcEnableArenas *bool `protobuf:"varint,31,opt,name=cc_enable_arenas,json=ccEnableArenas,def=0" json:"cc_enable_arenas,omitempty"` + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"` + // Namespace for generated classes; defaults to the package. 
+ CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"` + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + SwiftPrefix *string `protobuf:"bytes,39,opt,name=swift_prefix,json=swiftPrefix" json:"swift_prefix,omitempty"` + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"` + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FileOptions) Reset() { *m = FileOptions{} } +func (m *FileOptions) String() string { return proto.CompactTextString(m) } +func (*FileOptions) ProtoMessage() {} +func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +var extRange_FileOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_FileOptions +} + +const Default_FileOptions_JavaMultipleFiles bool = false +const Default_FileOptions_JavaStringCheckUtf8 bool = false +const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED +const Default_FileOptions_CcGenericServices bool = false +const Default_FileOptions_JavaGenericServices bool = false +const Default_FileOptions_PyGenericServices bool = false +const Default_FileOptions_PhpGenericServices bool = false +const Default_FileOptions_Deprecated bool = false +const Default_FileOptions_CcEnableArenas bool = false + +func (m *FileOptions) GetJavaPackage() string { + if m != nil && m.JavaPackage != nil { + return *m.JavaPackage + } + return "" +} + +func (m *FileOptions) GetJavaOuterClassname() string { + if m != nil && m.JavaOuterClassname != nil { + return *m.JavaOuterClassname + } + return "" +} + +func (m *FileOptions) GetJavaMultipleFiles() bool { + if m != nil && m.JavaMultipleFiles != nil { + return *m.JavaMultipleFiles + } + return Default_FileOptions_JavaMultipleFiles +} + +func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool { + if m != nil && m.JavaGenerateEqualsAndHash != nil { + return *m.JavaGenerateEqualsAndHash + } + return false +} + +func (m *FileOptions) GetJavaStringCheckUtf8() bool { + if m != nil && m.JavaStringCheckUtf8 != nil { + return *m.JavaStringCheckUtf8 + } + return Default_FileOptions_JavaStringCheckUtf8 +} + +func (m *FileOptions) GetOptimizeFor() FileOptions_OptimizeMode { + if m != nil && m.OptimizeFor != nil { + return *m.OptimizeFor + } + return Default_FileOptions_OptimizeFor +} + +func (m *FileOptions) GetGoPackage() string { + if m != nil && m.GoPackage != nil { + return *m.GoPackage + } + return "" +} + +func (m *FileOptions) 
GetCcGenericServices() bool { + if m != nil && m.CcGenericServices != nil { + return *m.CcGenericServices + } + return Default_FileOptions_CcGenericServices +} + +func (m *FileOptions) GetJavaGenericServices() bool { + if m != nil && m.JavaGenericServices != nil { + return *m.JavaGenericServices + } + return Default_FileOptions_JavaGenericServices +} + +func (m *FileOptions) GetPyGenericServices() bool { + if m != nil && m.PyGenericServices != nil { + return *m.PyGenericServices + } + return Default_FileOptions_PyGenericServices +} + +func (m *FileOptions) GetPhpGenericServices() bool { + if m != nil && m.PhpGenericServices != nil { + return *m.PhpGenericServices + } + return Default_FileOptions_PhpGenericServices +} + +func (m *FileOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_FileOptions_Deprecated +} + +func (m *FileOptions) GetCcEnableArenas() bool { + if m != nil && m.CcEnableArenas != nil { + return *m.CcEnableArenas + } + return Default_FileOptions_CcEnableArenas +} + +func (m *FileOptions) GetObjcClassPrefix() string { + if m != nil && m.ObjcClassPrefix != nil { + return *m.ObjcClassPrefix + } + return "" +} + +func (m *FileOptions) GetCsharpNamespace() string { + if m != nil && m.CsharpNamespace != nil { + return *m.CsharpNamespace + } + return "" +} + +func (m *FileOptions) GetSwiftPrefix() string { + if m != nil && m.SwiftPrefix != nil { + return *m.SwiftPrefix + } + return "" +} + +func (m *FileOptions) GetPhpClassPrefix() string { + if m != nil && m.PhpClassPrefix != nil { + return *m.PhpClassPrefix + } + return "" +} + +func (m *FileOptions) GetPhpNamespace() string { + if m != nil && m.PhpNamespace != nil { + return *m.PhpNamespace + } + return "" +} + +func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type MessageOptions struct { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + MessageSetWireFormat *bool `protobuf:"varint,1,opt,name=message_set_wire_format,json=messageSetWireFormat,def=0" json:"message_set_wire_format,omitempty"` + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + NoStandardDescriptorAccessor *bool `protobuf:"varint,2,opt,name=no_standard_descriptor_accessor,json=noStandardDescriptorAccessor,def=0" json:"no_standard_descriptor_accessor,omitempty"` + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. 
+ Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementions still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MessageOptions) Reset() { *m = MessageOptions{} } +func (m *MessageOptions) String() string { return proto.CompactTextString(m) } +func (*MessageOptions) ProtoMessage() {} +func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +var extRange_MessageOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MessageOptions +} + +const Default_MessageOptions_MessageSetWireFormat bool = false +const Default_MessageOptions_NoStandardDescriptorAccessor bool = false +const Default_MessageOptions_Deprecated bool = false + +func (m *MessageOptions) GetMessageSetWireFormat() bool { + if m != nil && m.MessageSetWireFormat != nil { + return *m.MessageSetWireFormat + } + return Default_MessageOptions_MessageSetWireFormat +} + +func (m *MessageOptions) GetNoStandardDescriptorAccessor() bool { + if m != nil && m.NoStandardDescriptorAccessor != nil { + return *m.NoStandardDescriptorAccessor + } + return Default_MessageOptions_NoStandardDescriptorAccessor +} + +func (m *MessageOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_MessageOptions_Deprecated +} + +func (m *MessageOptions) GetMapEntry() bool { + if m != nil && m.MapEntry != nil { + return *m.MapEntry + } + return false +} + +func (m *MessageOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type FieldOptions struct { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + Ctype *FieldOptions_CType `protobuf:"varint,1,opt,name=ctype,enum=google.protobuf.FieldOptions_CType,def=0" json:"ctype,omitempty"` + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. 
Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + Lazy *bool `protobuf:"varint,5,opt,name=lazy,def=0" json:"lazy,omitempty"` + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // For Google-internal migration only. Do not use. + Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FieldOptions) Reset() { *m = FieldOptions{} } +func (m *FieldOptions) String() string { return proto.CompactTextString(m) } +func (*FieldOptions) ProtoMessage() {} +func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +var extRange_FieldOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_FieldOptions +} + +const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING +const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL +const Default_FieldOptions_Lazy bool = false +const Default_FieldOptions_Deprecated bool = false +const Default_FieldOptions_Weak bool = false + +func (m *FieldOptions) GetCtype() FieldOptions_CType { + if m != nil && m.Ctype != nil { + return *m.Ctype + } + return Default_FieldOptions_Ctype +} + +func (m *FieldOptions) GetPacked() bool { + if m != nil && m.Packed != nil { + return *m.Packed + } + return false +} + +func (m *FieldOptions) GetJstype() FieldOptions_JSType { + if m != nil && m.Jstype != nil { + return *m.Jstype + } + return Default_FieldOptions_Jstype +} + +func (m *FieldOptions) GetLazy() bool { + if m != nil && m.Lazy != nil { + return *m.Lazy + } + return Default_FieldOptions_Lazy +} + +func (m *FieldOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_FieldOptions_Deprecated +} + +func (m *FieldOptions) GetWeak() bool { + if m != nil && m.Weak != nil { + return *m.Weak + } + return Default_FieldOptions_Weak +} + +func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type OneofOptions struct { + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OneofOptions) Reset() { *m = OneofOptions{} } +func (m *OneofOptions) String() string { return proto.CompactTextString(m) } +func (*OneofOptions) ProtoMessage() {} +func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +var extRange_OneofOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OneofOptions +} + +func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type EnumOptions struct { + // Set this option to true to allow mapping different tag names to the same + // value. + AllowAlias *bool `protobuf:"varint,2,opt,name=allow_alias,json=allowAlias" json:"allow_alias,omitempty"` + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumOptions) Reset() { *m = EnumOptions{} } +func (m *EnumOptions) String() string { return proto.CompactTextString(m) } +func (*EnumOptions) ProtoMessage() {} +func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +var extRange_EnumOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_EnumOptions +} + +const Default_EnumOptions_Deprecated bool = false + +func (m *EnumOptions) GetAllowAlias() bool { + if m != nil && m.AllowAlias != nil { + return *m.AllowAlias + } + return false +} + +func (m *EnumOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_EnumOptions_Deprecated +} + +func (m *EnumOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type EnumValueOptions struct { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } +func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } +func (*EnumValueOptions) ProtoMessage() {} +func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +var extRange_EnumValueOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_EnumValueOptions +} + +const Default_EnumValueOptions_Deprecated bool = false + +func (m *EnumValueOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_EnumValueOptions_Deprecated +} + +func (m *EnumValueOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type ServiceOptions struct { + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } +func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } +func (*ServiceOptions) ProtoMessage() {} +func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +var extRange_ServiceOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ServiceOptions +} + +const Default_ServiceOptions_Deprecated bool = false + +func (m *ServiceOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_ServiceOptions_Deprecated +} + +func (m *ServiceOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type MethodOptions struct { + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MethodOptions) Reset() { *m = MethodOptions{} } +func (m *MethodOptions) String() string { return proto.CompactTextString(m) } +func (*MethodOptions) ProtoMessage() {} +func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +var extRange_MethodOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MethodOptions +} + +const Default_MethodOptions_Deprecated bool = false +const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN + +func (m *MethodOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_MethodOptions_Deprecated +} + +func (m *MethodOptions) GetIdempotencyLevel() MethodOptions_IdempotencyLevel { + if m != nil && m.IdempotencyLevel != nil { + return *m.IdempotencyLevel + } + return Default_MethodOptions_IdempotencyLevel +} + +func (m *MethodOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. 
+type UninterpretedOption struct { + Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` + PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` + NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` + DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } +func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } +func (*UninterpretedOption) ProtoMessage() {} +func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { + if m != nil { + return m.Name + } + return nil +} + +func (m *UninterpretedOption) GetIdentifierValue() string { + if m != nil && m.IdentifierValue != nil { + return *m.IdentifierValue + } + return "" +} + +func (m *UninterpretedOption) GetPositiveIntValue() uint64 { + if m != nil && m.PositiveIntValue != nil { + return *m.PositiveIntValue + } + return 0 +} + +func (m *UninterpretedOption) GetNegativeIntValue() int64 { + if m != nil && m.NegativeIntValue != nil { + return *m.NegativeIntValue + } + return 0 +} + +func (m *UninterpretedOption) GetDoubleValue() float64 { + if m != nil && m.DoubleValue != nil { + return *m.DoubleValue + } + return 0 +} + +func (m *UninterpretedOption) GetStringValue() []byte { + if m != nil { + return m.StringValue + } + return nil +} + +func (m *UninterpretedOption) GetAggregateValue() string { + if m != nil && m.AggregateValue != nil { + return *m.AggregateValue + } + return "" +} + +// The name of the uninterpreted option. Each string represents a segment in +// a dot-separated name. is_extension is true iff a segment represents an +// extension (denoted with parentheses in options specs in .proto files). +// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents +// "foo.(bar.baz).qux". 
+type UninterpretedOption_NamePart struct { + NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` + IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOption_NamePart{} } +func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) } +func (*UninterpretedOption_NamePart) ProtoMessage() {} +func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{18, 0} +} + +func (m *UninterpretedOption_NamePart) GetNamePart() string { + if m != nil && m.NamePart != nil { + return *m.NamePart + } + return "" +} + +func (m *UninterpretedOption_NamePart) GetIsExtension() bool { + if m != nil && m.IsExtension != nil { + return *m.IsExtension + } + return false +} + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +type SourceCodeInfo struct { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. 
+ Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } +func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo) ProtoMessage() {} +func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { + if m != nil { + return m.Location + } + return nil +} + +type SourceCodeInfo_Location struct { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + Span []int32 `protobuf:"varint,2,rep,packed,name=span" json:"span,omitempty"` + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. 
Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` + TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` + LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } +func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo_Location) ProtoMessage() {} +func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19, 0} } + +func (m *SourceCodeInfo_Location) GetPath() []int32 { + if m != nil { + return m.Path + } + return nil +} + +func (m *SourceCodeInfo_Location) GetSpan() []int32 { + if m != nil { + return m.Span + } + return nil +} + +func (m *SourceCodeInfo_Location) GetLeadingComments() string { + if m != nil && m.LeadingComments != nil { + return *m.LeadingComments + } + return "" +} + +func (m *SourceCodeInfo_Location) GetTrailingComments() string { + if m != nil && m.TrailingComments != nil { + return *m.TrailingComments + } + return "" +} + +func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { + if m != nil { + return m.LeadingDetachedComments + } + return nil +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +type GeneratedCodeInfo struct { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } +func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } +func (*GeneratedCodeInfo) ProtoMessage() {} +func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { + if m != nil { + return m.Annotation + } + return nil +} + +type GeneratedCodeInfo_Annotation struct { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` + // Identifies the filesystem path to the original source .proto. + SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"` + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). 
+ End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} } +func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) } +func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} +func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{20, 0} +} + +func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 { + if m != nil { + return m.Path + } + return nil +} + +func (m *GeneratedCodeInfo_Annotation) GetSourceFile() string { + if m != nil && m.SourceFile != nil { + return *m.SourceFile + } + return "" +} + +func (m *GeneratedCodeInfo_Annotation) GetBegin() int32 { + if m != nil && m.Begin != nil { + return *m.Begin + } + return 0 +} + +func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +func init() { + proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet") + proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto") + proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto") + proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange") + proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange") + proto.RegisterType((*ExtensionRangeOptions)(nil), "google.protobuf.ExtensionRangeOptions") + proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto") + proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto") + proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto") + proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto") + proto.RegisterType((*ServiceDescriptorProto)(nil), "google.protobuf.ServiceDescriptorProto") + proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto") + proto.RegisterType((*FileOptions)(nil), "google.protobuf.FileOptions") + proto.RegisterType((*MessageOptions)(nil), "google.protobuf.MessageOptions") + proto.RegisterType((*FieldOptions)(nil), "google.protobuf.FieldOptions") + proto.RegisterType((*OneofOptions)(nil), "google.protobuf.OneofOptions") + proto.RegisterType((*EnumOptions)(nil), "google.protobuf.EnumOptions") + proto.RegisterType((*EnumValueOptions)(nil), "google.protobuf.EnumValueOptions") + proto.RegisterType((*ServiceOptions)(nil), "google.protobuf.ServiceOptions") + proto.RegisterType((*MethodOptions)(nil), "google.protobuf.MethodOptions") + proto.RegisterType((*UninterpretedOption)(nil), "google.protobuf.UninterpretedOption") + proto.RegisterType((*UninterpretedOption_NamePart)(nil), "google.protobuf.UninterpretedOption.NamePart") + proto.RegisterType((*SourceCodeInfo)(nil), "google.protobuf.SourceCodeInfo") + proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location") + proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo") + proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation") + proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value) + proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, 
FieldDescriptorProto_Label_value) + proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value) + proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value) + proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value) + proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value) +} + +func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 2519 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xdd, 0x6e, 0x1b, 0xc7, + 0x15, 0x0e, 0x7f, 0x45, 0x1e, 0x52, 0xd4, 0x68, 0xa4, 0xd8, 0x6b, 0xe5, 0xc7, 0x32, 0xf3, 0x63, + 0xd9, 0x69, 0xa8, 0x40, 0xb1, 0x1d, 0x47, 0x29, 0xd2, 0x52, 0xe4, 0x5a, 0xa1, 0x4a, 0x91, 0xec, + 0x92, 0x6a, 0x7e, 0x6e, 0x16, 0xa3, 0xdd, 0x21, 0xb9, 0xf6, 0x72, 0x77, 0xb3, 0xbb, 0xb4, 0xad, + 0xa0, 0x17, 0x06, 0x7a, 0x55, 0xa0, 0x0f, 0x50, 0x14, 0x45, 0x2f, 0x72, 0x13, 0xa0, 0x0f, 0x50, + 0x20, 0x77, 0x7d, 0x82, 0x02, 0x79, 0x83, 0xa2, 0x28, 0xd0, 0x3e, 0x46, 0x31, 0x33, 0xbb, 0xcb, + 0x5d, 0xfe, 0xc4, 0x6a, 0x80, 0x38, 0x57, 0xe4, 0x7c, 0xe7, 0x3b, 0x67, 0xce, 0x9c, 0x39, 0x33, + 0x73, 0x66, 0x16, 0x76, 0x47, 0xb6, 0x3d, 0x32, 0xe9, 0xbe, 0xe3, 0xda, 0xbe, 0x7d, 0x3e, 0x1d, + 0xee, 0xeb, 0xd4, 0xd3, 0x5c, 0xc3, 0xf1, 0x6d, 0xb7, 0xc6, 0x31, 0xbc, 0x21, 0x18, 0xb5, 0x90, + 0x51, 0x3d, 0x85, 0xcd, 0x07, 0x86, 0x49, 0x9b, 0x11, 0xb1, 0x4f, 0x7d, 0x7c, 0x1f, 0xb2, 0x43, + 0xc3, 0xa4, 0x52, 0x6a, 0x37, 0xb3, 0x57, 0x3a, 0x78, 0xb3, 0x36, 0xa7, 0x54, 0x4b, 0x6a, 0xf4, + 0x18, 0xac, 0x70, 0x8d, 0xea, 0xbf, 0xb3, 0xb0, 0xb5, 0x44, 0x8a, 0x31, 0x64, 0x2d, 0x32, 0x61, + 0x16, 0x53, 0x7b, 0x45, 0x85, 0xff, 0xc7, 0x12, 0xac, 0x39, 0x44, 0x7b, 0x44, 0x46, 0x54, 0x4a, + 0x73, 0x38, 0x6c, 0xe2, 0xd7, 0x01, 0x74, 0xea, 0x50, 0x4b, 0xa7, 0x96, 0x76, 0x21, 0x65, 0x76, + 0x33, 0x7b, 0x45, 0x25, 0x86, 0xe0, 0x77, 0x60, 0xd3, 0x99, 0x9e, 0x9b, 0x86, 0xa6, 0xc6, 0x68, + 0xb0, 0x9b, 0xd9, 0xcb, 0x29, 0x48, 0x08, 0x9a, 0x33, 0xf2, 0x4d, 0xd8, 0x78, 0x42, 0xc9, 0xa3, + 0x38, 0xb5, 0xc4, 0xa9, 0x15, 0x06, 0xc7, 0x88, 0x0d, 0x28, 0x4f, 0xa8, 0xe7, 0x91, 0x11, 0x55, + 0xfd, 0x0b, 0x87, 0x4a, 0x59, 0x3e, 0xfa, 0xdd, 0x85, 0xd1, 0xcf, 0x8f, 0xbc, 0x14, 0x68, 0x0d, + 0x2e, 0x1c, 0x8a, 0xeb, 0x50, 0xa4, 0xd6, 0x74, 0x22, 0x2c, 0xe4, 0x56, 0xc4, 0x4f, 0xb6, 0xa6, + 0x93, 0x79, 0x2b, 0x05, 0xa6, 0x16, 0x98, 0x58, 0xf3, 0xa8, 0xfb, 0xd8, 0xd0, 0xa8, 0x94, 0xe7, + 0x06, 0x6e, 0x2e, 0x18, 0xe8, 0x0b, 0xf9, 0xbc, 0x8d, 0x50, 0x0f, 0x37, 0xa0, 0x48, 0x9f, 0xfa, + 0xd4, 0xf2, 0x0c, 0xdb, 0x92, 0xd6, 0xb8, 0x91, 0xb7, 0x96, 0xcc, 0x22, 0x35, 0xf5, 0x79, 0x13, + 0x33, 0x3d, 0x7c, 0x0f, 0xd6, 0x6c, 0xc7, 0x37, 0x6c, 0xcb, 0x93, 0x0a, 0xbb, 0xa9, 0xbd, 0xd2, + 0xc1, 0xab, 0x4b, 0x13, 0xa1, 0x2b, 0x38, 0x4a, 0x48, 0xc6, 0x2d, 0x40, 0x9e, 0x3d, 0x75, 0x35, + 0xaa, 0x6a, 0xb6, 0x4e, 0x55, 0xc3, 0x1a, 0xda, 0x52, 0x91, 0x1b, 0xb8, 0xbe, 0x38, 0x10, 0x4e, + 0x6c, 0xd8, 0x3a, 0x6d, 0x59, 0x43, 0x5b, 0xa9, 0x78, 0x89, 0x36, 0xbe, 0x02, 0x79, 0xef, 0xc2, + 0xf2, 0xc9, 0x53, 0xa9, 0xcc, 0x33, 0x24, 0x68, 0x55, 0xbf, 0xcd, 0xc3, 0xc6, 0x65, 0x52, 0xec, + 0x23, 0xc8, 0x0d, 0xd9, 0x28, 0xa5, 0xf4, 0xff, 0x13, 0x03, 0xa1, 0x93, 0x0c, 0x62, 0xfe, 0x07, + 0x06, 0xb1, 0x0e, 0x25, 0x8b, 0x7a, 0x3e, 0xd5, 0x45, 0x46, 0x64, 0x2e, 0x99, 0x53, 0x20, 0x94, + 
0x16, 0x53, 0x2a, 0xfb, 0x83, 0x52, 0xea, 0x33, 0xd8, 0x88, 0x5c, 0x52, 0x5d, 0x62, 0x8d, 0xc2, + 0xdc, 0xdc, 0x7f, 0x9e, 0x27, 0x35, 0x39, 0xd4, 0x53, 0x98, 0x9a, 0x52, 0xa1, 0x89, 0x36, 0x6e, + 0x02, 0xd8, 0x16, 0xb5, 0x87, 0xaa, 0x4e, 0x35, 0x53, 0x2a, 0xac, 0x88, 0x52, 0x97, 0x51, 0x16, + 0xa2, 0x64, 0x0b, 0x54, 0x33, 0xf1, 0x87, 0xb3, 0x54, 0x5b, 0x5b, 0x91, 0x29, 0xa7, 0x62, 0x91, + 0x2d, 0x64, 0xdb, 0x19, 0x54, 0x5c, 0xca, 0xf2, 0x9e, 0xea, 0xc1, 0xc8, 0x8a, 0xdc, 0x89, 0xda, + 0x73, 0x47, 0xa6, 0x04, 0x6a, 0x62, 0x60, 0xeb, 0x6e, 0xbc, 0x89, 0xdf, 0x80, 0x08, 0x50, 0x79, + 0x5a, 0x01, 0xdf, 0x85, 0xca, 0x21, 0xd8, 0x21, 0x13, 0xba, 0xf3, 0x15, 0x54, 0x92, 0xe1, 0xc1, + 0xdb, 0x90, 0xf3, 0x7c, 0xe2, 0xfa, 0x3c, 0x0b, 0x73, 0x8a, 0x68, 0x60, 0x04, 0x19, 0x6a, 0xe9, + 0x7c, 0x97, 0xcb, 0x29, 0xec, 0x2f, 0xfe, 0xe5, 0x6c, 0xc0, 0x19, 0x3e, 0xe0, 0xb7, 0x17, 0x67, + 0x34, 0x61, 0x79, 0x7e, 0xdc, 0x3b, 0x1f, 0xc0, 0x7a, 0x62, 0x00, 0x97, 0xed, 0xba, 0xfa, 0x5b, + 0x78, 0x79, 0xa9, 0x69, 0xfc, 0x19, 0x6c, 0x4f, 0x2d, 0xc3, 0xf2, 0xa9, 0xeb, 0xb8, 0x94, 0x65, + 0xac, 0xe8, 0x4a, 0xfa, 0xcf, 0xda, 0x8a, 0x9c, 0x3b, 0x8b, 0xb3, 0x85, 0x15, 0x65, 0x6b, 0xba, + 0x08, 0xde, 0x2e, 0x16, 0xfe, 0xbb, 0x86, 0x9e, 0x3d, 0x7b, 0xf6, 0x2c, 0x5d, 0xfd, 0x63, 0x1e, + 0xb6, 0x97, 0xad, 0x99, 0xa5, 0xcb, 0xf7, 0x0a, 0xe4, 0xad, 0xe9, 0xe4, 0x9c, 0xba, 0x3c, 0x48, + 0x39, 0x25, 0x68, 0xe1, 0x3a, 0xe4, 0x4c, 0x72, 0x4e, 0x4d, 0x29, 0xbb, 0x9b, 0xda, 0xab, 0x1c, + 0xbc, 0x73, 0xa9, 0x55, 0x59, 0x6b, 0x33, 0x15, 0x45, 0x68, 0xe2, 0x8f, 0x21, 0x1b, 0x6c, 0xd1, + 0xcc, 0xc2, 0xed, 0xcb, 0x59, 0x60, 0x6b, 0x49, 0xe1, 0x7a, 0xf8, 0x15, 0x28, 0xb2, 0x5f, 0x91, + 0x1b, 0x79, 0xee, 0x73, 0x81, 0x01, 0x2c, 0x2f, 0xf0, 0x0e, 0x14, 0xf8, 0x32, 0xd1, 0x69, 0x78, + 0xb4, 0x45, 0x6d, 0x96, 0x58, 0x3a, 0x1d, 0x92, 0xa9, 0xe9, 0xab, 0x8f, 0x89, 0x39, 0xa5, 0x3c, + 0xe1, 0x8b, 0x4a, 0x39, 0x00, 0x7f, 0xc3, 0x30, 0x7c, 0x1d, 0x4a, 0x62, 0x55, 0x19, 0x96, 0x4e, + 0x9f, 0xf2, 0xdd, 0x33, 0xa7, 0x88, 0x85, 0xd6, 0x62, 0x08, 0xeb, 0xfe, 0xa1, 0x67, 0x5b, 0x61, + 0x6a, 0xf2, 0x2e, 0x18, 0xc0, 0xbb, 0xff, 0x60, 0x7e, 0xe3, 0x7e, 0x6d, 0xf9, 0xf0, 0xe6, 0x73, + 0xaa, 0xfa, 0xb7, 0x34, 0x64, 0xf9, 0x7e, 0xb1, 0x01, 0xa5, 0xc1, 0xe7, 0x3d, 0x59, 0x6d, 0x76, + 0xcf, 0x8e, 0xda, 0x32, 0x4a, 0xe1, 0x0a, 0x00, 0x07, 0x1e, 0xb4, 0xbb, 0xf5, 0x01, 0x4a, 0x47, + 0xed, 0x56, 0x67, 0x70, 0xef, 0x0e, 0xca, 0x44, 0x0a, 0x67, 0x02, 0xc8, 0xc6, 0x09, 0xef, 0x1f, + 0xa0, 0x1c, 0x46, 0x50, 0x16, 0x06, 0x5a, 0x9f, 0xc9, 0xcd, 0x7b, 0x77, 0x50, 0x3e, 0x89, 0xbc, + 0x7f, 0x80, 0xd6, 0xf0, 0x3a, 0x14, 0x39, 0x72, 0xd4, 0xed, 0xb6, 0x51, 0x21, 0xb2, 0xd9, 0x1f, + 0x28, 0xad, 0xce, 0x31, 0x2a, 0x46, 0x36, 0x8f, 0x95, 0xee, 0x59, 0x0f, 0x41, 0x64, 0xe1, 0x54, + 0xee, 0xf7, 0xeb, 0xc7, 0x32, 0x2a, 0x45, 0x8c, 0xa3, 0xcf, 0x07, 0x72, 0x1f, 0x95, 0x13, 0x6e, + 0xbd, 0x7f, 0x80, 0xd6, 0xa3, 0x2e, 0xe4, 0xce, 0xd9, 0x29, 0xaa, 0xe0, 0x4d, 0x58, 0x17, 0x5d, + 0x84, 0x4e, 0x6c, 0xcc, 0x41, 0xf7, 0xee, 0x20, 0x34, 0x73, 0x44, 0x58, 0xd9, 0x4c, 0x00, 0xf7, + 0xee, 0x20, 0x5c, 0x6d, 0x40, 0x8e, 0x67, 0x17, 0xc6, 0x50, 0x69, 0xd7, 0x8f, 0xe4, 0xb6, 0xda, + 0xed, 0x0d, 0x5a, 0xdd, 0x4e, 0xbd, 0x8d, 0x52, 0x33, 0x4c, 0x91, 0x7f, 0x7d, 0xd6, 0x52, 0xe4, + 0x26, 0x4a, 0xc7, 0xb1, 0x9e, 0x5c, 0x1f, 0xc8, 0x4d, 0x94, 0xa9, 0x6a, 0xb0, 0xbd, 0x6c, 0x9f, + 0x5c, 0xba, 0x32, 0x62, 0x53, 0x9c, 0x5e, 0x31, 0xc5, 0xdc, 0xd6, 0xc2, 0x14, 0x7f, 0x9d, 0x82, + 0xad, 0x25, 0x67, 0xc5, 0xd2, 0x4e, 0x7e, 0x01, 0x39, 0x91, 0xa2, 0xe2, 0xf4, 0xbc, 0xb5, 0xf4, + 0xd0, 0xe1, 0x09, 0xbb, 
0x70, 0x82, 0x72, 0xbd, 0x78, 0x05, 0x91, 0x59, 0x51, 0x41, 0x30, 0x13, + 0x0b, 0x4e, 0xfe, 0x2e, 0x05, 0xd2, 0x2a, 0xdb, 0xcf, 0xd9, 0x28, 0xd2, 0x89, 0x8d, 0xe2, 0xa3, + 0x79, 0x07, 0x6e, 0xac, 0x1e, 0xc3, 0x82, 0x17, 0xdf, 0xa4, 0xe0, 0xca, 0xf2, 0x42, 0x6b, 0xa9, + 0x0f, 0x1f, 0x43, 0x7e, 0x42, 0xfd, 0xb1, 0x1d, 0x16, 0x1b, 0x6f, 0x2f, 0x39, 0xc2, 0x98, 0x78, + 0x3e, 0x56, 0x81, 0x56, 0xfc, 0x0c, 0xcc, 0xac, 0xaa, 0x96, 0x84, 0x37, 0x0b, 0x9e, 0xfe, 0x3e, + 0x0d, 0x2f, 0x2f, 0x35, 0xbe, 0xd4, 0xd1, 0xd7, 0x00, 0x0c, 0xcb, 0x99, 0xfa, 0xa2, 0xa0, 0x10, + 0xfb, 0x53, 0x91, 0x23, 0x7c, 0xed, 0xb3, 0xbd, 0x67, 0xea, 0x47, 0xf2, 0x0c, 0x97, 0x83, 0x80, + 0x38, 0xe1, 0xfe, 0xcc, 0xd1, 0x2c, 0x77, 0xf4, 0xf5, 0x15, 0x23, 0x5d, 0x38, 0xab, 0xdf, 0x03, + 0xa4, 0x99, 0x06, 0xb5, 0x7c, 0xd5, 0xf3, 0x5d, 0x4a, 0x26, 0x86, 0x35, 0xe2, 0x1b, 0x70, 0xe1, + 0x30, 0x37, 0x24, 0xa6, 0x47, 0x95, 0x0d, 0x21, 0xee, 0x87, 0x52, 0xa6, 0xc1, 0xcf, 0x38, 0x37, + 0xa6, 0x91, 0x4f, 0x68, 0x08, 0x71, 0xa4, 0x51, 0xfd, 0xb6, 0x00, 0xa5, 0x58, 0x59, 0x8a, 0x6f, + 0x40, 0xf9, 0x21, 0x79, 0x4c, 0xd4, 0xf0, 0xaa, 0x21, 0x22, 0x51, 0x62, 0x58, 0x2f, 0xb8, 0x6e, + 0xbc, 0x07, 0xdb, 0x9c, 0x62, 0x4f, 0x7d, 0xea, 0xaa, 0x9a, 0x49, 0x3c, 0x8f, 0x07, 0xad, 0xc0, + 0xa9, 0x98, 0xc9, 0xba, 0x4c, 0xd4, 0x08, 0x25, 0xf8, 0x2e, 0x6c, 0x71, 0x8d, 0xc9, 0xd4, 0xf4, + 0x0d, 0xc7, 0xa4, 0x2a, 0xbb, 0xfc, 0x78, 0x7c, 0x23, 0x8e, 0x3c, 0xdb, 0x64, 0x8c, 0xd3, 0x80, + 0xc0, 0x3c, 0xf2, 0x70, 0x13, 0x5e, 0xe3, 0x6a, 0x23, 0x6a, 0x51, 0x97, 0xf8, 0x54, 0xa5, 0x5f, + 0x4e, 0x89, 0xe9, 0xa9, 0xc4, 0xd2, 0xd5, 0x31, 0xf1, 0xc6, 0xd2, 0x36, 0x33, 0x70, 0x94, 0x96, + 0x52, 0xca, 0x35, 0x46, 0x3c, 0x0e, 0x78, 0x32, 0xa7, 0xd5, 0x2d, 0xfd, 0x13, 0xe2, 0x8d, 0xf1, + 0x21, 0x5c, 0xe1, 0x56, 0x3c, 0xdf, 0x35, 0xac, 0x91, 0xaa, 0x8d, 0xa9, 0xf6, 0x48, 0x9d, 0xfa, + 0xc3, 0xfb, 0xd2, 0x2b, 0xf1, 0xfe, 0xb9, 0x87, 0x7d, 0xce, 0x69, 0x30, 0xca, 0x99, 0x3f, 0xbc, + 0x8f, 0xfb, 0x50, 0x66, 0x93, 0x31, 0x31, 0xbe, 0xa2, 0xea, 0xd0, 0x76, 0xf9, 0xc9, 0x52, 0x59, + 0xb2, 0xb2, 0x63, 0x11, 0xac, 0x75, 0x03, 0x85, 0x53, 0x5b, 0xa7, 0x87, 0xb9, 0x7e, 0x4f, 0x96, + 0x9b, 0x4a, 0x29, 0xb4, 0xf2, 0xc0, 0x76, 0x59, 0x42, 0x8d, 0xec, 0x28, 0xc0, 0x25, 0x91, 0x50, + 0x23, 0x3b, 0x0c, 0xef, 0x5d, 0xd8, 0xd2, 0x34, 0x31, 0x66, 0x43, 0x53, 0x83, 0x2b, 0x8a, 0x27, + 0xa1, 0x44, 0xb0, 0x34, 0xed, 0x58, 0x10, 0x82, 0x1c, 0xf7, 0xf0, 0x87, 0xf0, 0xf2, 0x2c, 0x58, + 0x71, 0xc5, 0xcd, 0x85, 0x51, 0xce, 0xab, 0xde, 0x85, 0x2d, 0xe7, 0x62, 0x51, 0x11, 0x27, 0x7a, + 0x74, 0x2e, 0xe6, 0xd5, 0x3e, 0x80, 0x6d, 0x67, 0xec, 0x2c, 0xea, 0xdd, 0x8e, 0xeb, 0x61, 0x67, + 0xec, 0xcc, 0x2b, 0xbe, 0xc5, 0xef, 0xab, 0x2e, 0xd5, 0x88, 0x4f, 0x75, 0xe9, 0x6a, 0x9c, 0x1e, + 0x13, 0xe0, 0x7d, 0x40, 0x9a, 0xa6, 0x52, 0x8b, 0x9c, 0x9b, 0x54, 0x25, 0x2e, 0xb5, 0x88, 0x27, + 0x5d, 0x8f, 0x93, 0x2b, 0x9a, 0x26, 0x73, 0x69, 0x9d, 0x0b, 0xf1, 0x6d, 0xd8, 0xb4, 0xcf, 0x1f, + 0x6a, 0x22, 0x25, 0x55, 0xc7, 0xa5, 0x43, 0xe3, 0xa9, 0xf4, 0x26, 0x8f, 0xef, 0x06, 0x13, 0xf0, + 0x84, 0xec, 0x71, 0x18, 0xdf, 0x02, 0xa4, 0x79, 0x63, 0xe2, 0x3a, 0xbc, 0x26, 0xf0, 0x1c, 0xa2, + 0x51, 0xe9, 0x2d, 0x41, 0x15, 0x78, 0x27, 0x84, 0xd9, 0x92, 0xf0, 0x9e, 0x18, 0x43, 0x3f, 0xb4, + 0x78, 0x53, 0x2c, 0x09, 0x8e, 0x05, 0xd6, 0xf6, 0x00, 0xb1, 0x50, 0x24, 0x3a, 0xde, 0xe3, 0xb4, + 0x8a, 0x33, 0x76, 0xe2, 0xfd, 0xbe, 0x01, 0xeb, 0x8c, 0x39, 0xeb, 0xf4, 0x96, 0xa8, 0x67, 0x9c, + 0x71, 0xac, 0xc7, 0x1f, 0xad, 0xb4, 0xac, 0x1e, 0x42, 0x39, 0x9e, 0x9f, 0xb8, 0x08, 0x22, 0x43, + 0x51, 0x8a, 0x9d, 0xf5, 0x8d, 0x6e, 0x93, 0x9d, 
0xd2, 0x5f, 0xc8, 0x28, 0xcd, 0xaa, 0x85, 0x76, + 0x6b, 0x20, 0xab, 0xca, 0x59, 0x67, 0xd0, 0x3a, 0x95, 0x51, 0x26, 0x56, 0x96, 0x9e, 0x64, 0x0b, + 0x6f, 0xa3, 0x9b, 0xd5, 0xef, 0xd2, 0x50, 0x49, 0xde, 0x33, 0xf0, 0xcf, 0xe1, 0x6a, 0xf8, 0x28, + 0xe0, 0x51, 0x5f, 0x7d, 0x62, 0xb8, 0x7c, 0xe1, 0x4c, 0x88, 0xa8, 0xb3, 0xa3, 0xa9, 0xdb, 0x0e, + 0x58, 0x7d, 0xea, 0x7f, 0x6a, 0xb8, 0x6c, 0x59, 0x4c, 0x88, 0x8f, 0xdb, 0x70, 0xdd, 0xb2, 0x55, + 0xcf, 0x27, 0x96, 0x4e, 0x5c, 0x5d, 0x9d, 0x3d, 0xc7, 0xa8, 0x44, 0xd3, 0xa8, 0xe7, 0xd9, 0xe2, + 0xc0, 0x8a, 0xac, 0xbc, 0x6a, 0xd9, 0xfd, 0x80, 0x3c, 0xdb, 0xc9, 0xeb, 0x01, 0x75, 0x2e, 0xcd, + 0x32, 0xab, 0xd2, 0xec, 0x15, 0x28, 0x4e, 0x88, 0xa3, 0x52, 0xcb, 0x77, 0x2f, 0x78, 0x75, 0x59, + 0x50, 0x0a, 0x13, 0xe2, 0xc8, 0xac, 0xfd, 0x42, 0x8a, 0xfc, 0x93, 0x6c, 0xa1, 0x80, 0x8a, 0x27, + 0xd9, 0x42, 0x11, 0x41, 0xf5, 0x5f, 0x19, 0x28, 0xc7, 0xab, 0x4d, 0x56, 0xbc, 0x6b, 0xfc, 0x64, + 0x49, 0xf1, 0xbd, 0xe7, 0x8d, 0xef, 0xad, 0x4d, 0x6b, 0x0d, 0x76, 0xe4, 0x1c, 0xe6, 0x45, 0x0d, + 0xa8, 0x08, 0x4d, 0x76, 0xdc, 0xb3, 0xdd, 0x86, 0x8a, 0x7b, 0x4d, 0x41, 0x09, 0x5a, 0xf8, 0x18, + 0xf2, 0x0f, 0x3d, 0x6e, 0x3b, 0xcf, 0x6d, 0xbf, 0xf9, 0xfd, 0xb6, 0x4f, 0xfa, 0xdc, 0x78, 0xf1, + 0xa4, 0xaf, 0x76, 0xba, 0xca, 0x69, 0xbd, 0xad, 0x04, 0xea, 0xf8, 0x1a, 0x64, 0x4d, 0xf2, 0xd5, + 0x45, 0xf2, 0x70, 0xe2, 0xd0, 0x65, 0x27, 0xe1, 0x1a, 0x64, 0x9f, 0x50, 0xf2, 0x28, 0x79, 0x24, + 0x70, 0xe8, 0x47, 0x5c, 0x0c, 0xfb, 0x90, 0xe3, 0xf1, 0xc2, 0x00, 0x41, 0xc4, 0xd0, 0x4b, 0xb8, + 0x00, 0xd9, 0x46, 0x57, 0x61, 0x0b, 0x02, 0x41, 0x59, 0xa0, 0x6a, 0xaf, 0x25, 0x37, 0x64, 0x94, + 0xae, 0xde, 0x85, 0xbc, 0x08, 0x02, 0x5b, 0x2c, 0x51, 0x18, 0xd0, 0x4b, 0x41, 0x33, 0xb0, 0x91, + 0x0a, 0xa5, 0x67, 0xa7, 0x47, 0xb2, 0x82, 0xd2, 0xc9, 0xa9, 0xce, 0xa2, 0x5c, 0xd5, 0x83, 0x72, + 0xbc, 0xdc, 0x7c, 0x31, 0x57, 0xc9, 0xbf, 0xa7, 0xa0, 0x14, 0x2b, 0x1f, 0x59, 0xe1, 0x42, 0x4c, + 0xd3, 0x7e, 0xa2, 0x12, 0xd3, 0x20, 0x5e, 0x90, 0x1a, 0xc0, 0xa1, 0x3a, 0x43, 0x2e, 0x3b, 0x75, + 0x2f, 0x68, 0x89, 0xe4, 0x50, 0xbe, 0xfa, 0x97, 0x14, 0xa0, 0xf9, 0x02, 0x74, 0xce, 0xcd, 0xd4, + 0x4f, 0xe9, 0x66, 0xf5, 0xcf, 0x29, 0xa8, 0x24, 0xab, 0xce, 0x39, 0xf7, 0x6e, 0xfc, 0xa4, 0xee, + 0xfd, 0x33, 0x0d, 0xeb, 0x89, 0x5a, 0xf3, 0xb2, 0xde, 0x7d, 0x09, 0x9b, 0x86, 0x4e, 0x27, 0x8e, + 0xed, 0x53, 0x4b, 0xbb, 0x50, 0x4d, 0xfa, 0x98, 0x9a, 0x52, 0x95, 0x6f, 0x1a, 0xfb, 0xdf, 0x5f, + 0xcd, 0xd6, 0x5a, 0x33, 0xbd, 0x36, 0x53, 0x3b, 0xdc, 0x6a, 0x35, 0xe5, 0xd3, 0x5e, 0x77, 0x20, + 0x77, 0x1a, 0x9f, 0xab, 0x67, 0x9d, 0x5f, 0x75, 0xba, 0x9f, 0x76, 0x14, 0x64, 0xcc, 0xd1, 0x7e, + 0xc4, 0x65, 0xdf, 0x03, 0x34, 0xef, 0x14, 0xbe, 0x0a, 0xcb, 0xdc, 0x42, 0x2f, 0xe1, 0x2d, 0xd8, + 0xe8, 0x74, 0xd5, 0x7e, 0xab, 0x29, 0xab, 0xf2, 0x83, 0x07, 0x72, 0x63, 0xd0, 0x17, 0xd7, 0xfb, + 0x88, 0x3d, 0x48, 0x2c, 0xf0, 0xea, 0x9f, 0x32, 0xb0, 0xb5, 0xc4, 0x13, 0x5c, 0x0f, 0x6e, 0x16, + 0xe2, 0xb2, 0xf3, 0xee, 0x65, 0xbc, 0xaf, 0xb1, 0x82, 0xa0, 0x47, 0x5c, 0x3f, 0xb8, 0x88, 0xdc, + 0x02, 0x16, 0x25, 0xcb, 0x37, 0x86, 0x06, 0x75, 0x83, 0xd7, 0x10, 0x71, 0xdd, 0xd8, 0x98, 0xe1, + 0xe2, 0x41, 0xe4, 0x67, 0x80, 0x1d, 0xdb, 0x33, 0x7c, 0xe3, 0x31, 0x55, 0x0d, 0x2b, 0x7c, 0x3a, + 0x61, 0xd7, 0x8f, 0xac, 0x82, 0x42, 0x49, 0xcb, 0xf2, 0x23, 0xb6, 0x45, 0x47, 0x64, 0x8e, 0xcd, + 0x36, 0xf3, 0x8c, 0x82, 0x42, 0x49, 0xc4, 0xbe, 0x01, 0x65, 0xdd, 0x9e, 0xb2, 0x9a, 0x4c, 0xf0, + 0xd8, 0xd9, 0x91, 0x52, 0x4a, 0x02, 0x8b, 0x28, 0x41, 0xb5, 0x3d, 0x7b, 0xb3, 0x29, 0x2b, 0x25, + 0x81, 0x09, 0xca, 0x4d, 0xd8, 0x20, 0xa3, 0x91, 0xcb, 0x8c, 0x87, 0x86, 
0xc4, 0xfd, 0xa1, 0x12, + 0xc1, 0x9c, 0xb8, 0x73, 0x02, 0x85, 0x30, 0x0e, 0xec, 0xa8, 0x66, 0x91, 0x50, 0x1d, 0xf1, 0x6e, + 0x97, 0xde, 0x2b, 0x2a, 0x05, 0x2b, 0x14, 0xde, 0x80, 0xb2, 0xe1, 0xa9, 0xb3, 0x27, 0xe8, 0xf4, + 0x6e, 0x7a, 0xaf, 0xa0, 0x94, 0x0c, 0x2f, 0x7a, 0xbe, 0xab, 0x7e, 0x93, 0x86, 0x4a, 0xf2, 0x09, + 0x1d, 0x37, 0xa1, 0x60, 0xda, 0x1a, 0xe1, 0xa9, 0x25, 0xbe, 0xdf, 0xec, 0x3d, 0xe7, 0xd5, 0xbd, + 0xd6, 0x0e, 0xf8, 0x4a, 0xa4, 0xb9, 0xf3, 0x8f, 0x14, 0x14, 0x42, 0x18, 0x5f, 0x81, 0xac, 0x43, + 0xfc, 0x31, 0x37, 0x97, 0x3b, 0x4a, 0xa3, 0x94, 0xc2, 0xdb, 0x0c, 0xf7, 0x1c, 0x62, 0xf1, 0x14, + 0x08, 0x70, 0xd6, 0x66, 0xf3, 0x6a, 0x52, 0xa2, 0xf3, 0xcb, 0x89, 0x3d, 0x99, 0x50, 0xcb, 0xf7, + 0xc2, 0x79, 0x0d, 0xf0, 0x46, 0x00, 0xe3, 0x77, 0x60, 0xd3, 0x77, 0x89, 0x61, 0x26, 0xb8, 0x59, + 0xce, 0x45, 0xa1, 0x20, 0x22, 0x1f, 0xc2, 0xb5, 0xd0, 0xae, 0x4e, 0x7d, 0xa2, 0x8d, 0xa9, 0x3e, + 0x53, 0xca, 0xf3, 0xf7, 0xd9, 0xab, 0x01, 0xa1, 0x19, 0xc8, 0x43, 0xdd, 0xea, 0x77, 0x29, 0xd8, + 0x0c, 0xaf, 0x53, 0x7a, 0x14, 0xac, 0x53, 0x00, 0x62, 0x59, 0xb6, 0x1f, 0x0f, 0xd7, 0x62, 0x2a, + 0x2f, 0xe8, 0xd5, 0xea, 0x91, 0x92, 0x12, 0x33, 0xb0, 0x33, 0x01, 0x98, 0x49, 0x56, 0x86, 0xed, + 0x3a, 0x94, 0x82, 0xef, 0x23, 0xfc, 0x23, 0x9b, 0xb8, 0x80, 0x83, 0x80, 0xd8, 0xbd, 0x0b, 0x6f, + 0x43, 0xee, 0x9c, 0x8e, 0x0c, 0x2b, 0x78, 0xf5, 0x14, 0x8d, 0xf0, 0x25, 0x37, 0x1b, 0xbd, 0xe4, + 0x1e, 0xfd, 0x21, 0x05, 0x5b, 0x9a, 0x3d, 0x99, 0xf7, 0xf7, 0x08, 0xcd, 0xbd, 0x02, 0x78, 0x9f, + 0xa4, 0xbe, 0xf8, 0x78, 0x64, 0xf8, 0xe3, 0xe9, 0x79, 0x4d, 0xb3, 0x27, 0xfb, 0x23, 0xdb, 0x24, + 0xd6, 0x68, 0xf6, 0x95, 0x90, 0xff, 0xd1, 0xde, 0x1d, 0x51, 0xeb, 0xdd, 0x91, 0x1d, 0xfb, 0x66, + 0xf8, 0xd1, 0xec, 0xef, 0xd7, 0xe9, 0xcc, 0x71, 0xef, 0xe8, 0xaf, 0xe9, 0x9d, 0x63, 0xd1, 0x57, + 0x2f, 0x8c, 0x8d, 0x42, 0x87, 0x26, 0xd5, 0xd8, 0x78, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x0c, + 0xab, 0xb6, 0x37, 0x7e, 0x1c, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto new file mode 100644 index 000000000..4d4fb378f --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.proto @@ -0,0 +1,849 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + +syntax = "proto2"; + +package google.protobuf; +option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; + optional int32 end = 2; + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. 
+ message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + }; + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + }; + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. 
+message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default=false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default=false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. 
This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default=false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default=false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default=SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default=false]; + optional bool java_generic_services = 17 [default=false]; + optional bool py_generic_services = 18 [default=false]; + optional bool php_generic_services = 42 [default=false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default=false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default=false]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. 
+ optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default=false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default=false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default=false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementions still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. 
+ repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. 
That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default=false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default=false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default=false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default=false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. 
+ + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default=false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = + 34 [default=IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. 
For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed=true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed=true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. 
+ // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed=true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/doc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/doc.go new file mode 100644 index 000000000..0d6055d61 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/doc.go @@ -0,0 +1,51 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* + A plugin for the Google protocol buffer compiler to generate Go code. + Run it by building this program and putting it in your path with the name + protoc-gen-go + That word 'go' at the end becomes part of the option string set for the + protocol compiler, so once the protocol compiler (protoc) is installed + you can run + protoc --go_out=output_directory input_directory/file.proto + to generate Go bindings for the protocol defined by file.proto. + With that input, the output will be written to + output_directory/file.pb.go + + The generated code is documented in the package comment for + the library. + + See the README and documentation for protocol buffers to learn more: + https://developers.google.com/protocol-buffers/ + +*/ +package documentation diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile new file mode 100644 index 000000000..b5715c357 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/Makefile @@ -0,0 +1,40 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +include $(GOROOT)/src/Make.inc + +TARG=github.com/golang/protobuf/compiler/generator +GOFILES=\ + generator.go\ + +DEPS=../descriptor ../plugin ../../proto + +include $(GOROOT)/src/Make.pkg diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go new file mode 100644 index 000000000..60d524645 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go @@ -0,0 +1,2866 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* + The code generator for the plugin for the Google protocol buffer compiler. + It generates Go code from the protocol buffer description files read by the + main routine. +*/ +package generator + +import ( + "bufio" + "bytes" + "compress/gzip" + "fmt" + "go/parser" + "go/printer" + "go/token" + "log" + "os" + "path" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/golang/protobuf/proto" + + "github.com/golang/protobuf/protoc-gen-go/descriptor" + plugin "github.com/golang/protobuf/protoc-gen-go/plugin" +) + +// generatedCodeVersion indicates a version of the generated code. +// It is incremented whenever an incompatibility between the generated code and +// proto package is introduced; the generated code references +// a constant, proto.ProtoPackageIsVersionN (where N is generatedCodeVersion). +const generatedCodeVersion = 2 + +// A Plugin provides functionality to add to the output during Go code generation, +// such as to produce RPC stubs. +type Plugin interface { + // Name identifies the plugin. + Name() string + // Init is called once after data structures are built but before + // code generation begins. + Init(g *Generator) + // Generate produces the code generated by the plugin for this file, + // except for the imports, by calling the generator's methods P, In, and Out. 
+ Generate(file *FileDescriptor) + // GenerateImports produces the import declarations for this file. + // It is called after Generate. + GenerateImports(file *FileDescriptor) +} + +var plugins []Plugin + +// RegisterPlugin installs a (second-order) plugin to be run when the Go output is generated. +// It is typically called during initialization. +func RegisterPlugin(p Plugin) { + plugins = append(plugins, p) +} + +// Each type we import as a protocol buffer (other than FileDescriptorProto) needs +// a pointer to the FileDescriptorProto that represents it. These types achieve that +// wrapping by placing each Proto inside a struct with the pointer to its File. The +// structs have the same names as their contents, with "Proto" removed. +// FileDescriptor is used to store the things that it points to. + +// The file and package name method are common to messages and enums. +type common struct { + file *descriptor.FileDescriptorProto // File this object comes from. +} + +// PackageName is name in the package clause in the generated file. +func (c *common) PackageName() string { return uniquePackageOf(c.file) } + +func (c *common) File() *descriptor.FileDescriptorProto { return c.file } + +func fileIsProto3(file *descriptor.FileDescriptorProto) bool { + return file.GetSyntax() == "proto3" +} + +func (c *common) proto3() bool { return fileIsProto3(c.file) } + +// Descriptor represents a protocol buffer message. +type Descriptor struct { + common + *descriptor.DescriptorProto + parent *Descriptor // The containing message, if any. + nested []*Descriptor // Inner messages, if any. + enums []*EnumDescriptor // Inner enums, if any. + ext []*ExtensionDescriptor // Extensions, if any. + typename []string // Cached typename vector. + index int // The index into the container, whether the file or another message. + path string // The SourceCodeInfo path as comma-separated integers. + group bool +} + +// TypeName returns the elements of the dotted type name. +// The package name is not part of this name. +func (d *Descriptor) TypeName() []string { + if d.typename != nil { + return d.typename + } + n := 0 + for parent := d; parent != nil; parent = parent.parent { + n++ + } + s := make([]string, n, n) + for parent := d; parent != nil; parent = parent.parent { + n-- + s[n] = parent.GetName() + } + d.typename = s + return s +} + +// EnumDescriptor describes an enum. If it's at top level, its parent will be nil. +// Otherwise it will be the descriptor of the message in which it is defined. +type EnumDescriptor struct { + common + *descriptor.EnumDescriptorProto + parent *Descriptor // The containing message, if any. + typename []string // Cached typename vector. + index int // The index into the container, whether the file or a message. + path string // The SourceCodeInfo path as comma-separated integers. +} + +// TypeName returns the elements of the dotted type name. +// The package name is not part of this name. +func (e *EnumDescriptor) TypeName() (s []string) { + if e.typename != nil { + return e.typename + } + name := e.GetName() + if e.parent == nil { + s = make([]string, 1) + } else { + pname := e.parent.TypeName() + s = make([]string, len(pname)+1) + copy(s, pname) + } + s[len(s)-1] = name + e.typename = s + return s +} + +// Everything but the last element of the full type name, CamelCased. +// The values of type Foo.Bar are call Foo_value1... not Foo_Bar_value1... . 
+func (e *EnumDescriptor) prefix() string { + if e.parent == nil { + // If the enum is not part of a message, the prefix is just the type name. + return CamelCase(*e.Name) + "_" + } + typeName := e.TypeName() + return CamelCaseSlice(typeName[0:len(typeName)-1]) + "_" +} + +// The integer value of the named constant in this enumerated type. +func (e *EnumDescriptor) integerValueAsString(name string) string { + for _, c := range e.Value { + if c.GetName() == name { + return fmt.Sprint(c.GetNumber()) + } + } + log.Fatal("cannot find value for enum constant") + return "" +} + +// ExtensionDescriptor describes an extension. If it's at top level, its parent will be nil. +// Otherwise it will be the descriptor of the message in which it is defined. +type ExtensionDescriptor struct { + common + *descriptor.FieldDescriptorProto + parent *Descriptor // The containing message, if any. +} + +// TypeName returns the elements of the dotted type name. +// The package name is not part of this name. +func (e *ExtensionDescriptor) TypeName() (s []string) { + name := e.GetName() + if e.parent == nil { + // top-level extension + s = make([]string, 1) + } else { + pname := e.parent.TypeName() + s = make([]string, len(pname)+1) + copy(s, pname) + } + s[len(s)-1] = name + return s +} + +// DescName returns the variable name used for the generated descriptor. +func (e *ExtensionDescriptor) DescName() string { + // The full type name. + typeName := e.TypeName() + // Each scope of the extension is individually CamelCased, and all are joined with "_" with an "E_" prefix. + for i, s := range typeName { + typeName[i] = CamelCase(s) + } + return "E_" + strings.Join(typeName, "_") +} + +// ImportedDescriptor describes a type that has been publicly imported from another file. +type ImportedDescriptor struct { + common + o Object +} + +func (id *ImportedDescriptor) TypeName() []string { return id.o.TypeName() } + +// FileDescriptor describes an protocol buffer descriptor file (.proto). +// It includes slices of all the messages and enums defined within it. +// Those slices are constructed by WrapTypes. +type FileDescriptor struct { + *descriptor.FileDescriptorProto + desc []*Descriptor // All the messages defined in this file. + enum []*EnumDescriptor // All the enums defined in this file. + ext []*ExtensionDescriptor // All the top-level extensions defined in this file. + imp []*ImportedDescriptor // All types defined in files publicly imported by this file. + + // Comments, stored as a map of path (comma-separated integers) to the comment. + comments map[string]*descriptor.SourceCodeInfo_Location + + // The full list of symbols that are exported, + // as a map from the exported object to its symbols. + // This is used for supporting public imports. + exported map[Object][]symbol + + index int // The index of this file in the list of files to generate code for + + proto3 bool // whether to generate proto3 code for this file +} + +// PackageName is the package name we'll use in the generated code to refer to this file. +func (d *FileDescriptor) PackageName() string { return uniquePackageOf(d.FileDescriptorProto) } + +// VarName is the variable name we'll use in the generated code to refer +// to the compressed bytes of this descriptor. It is not exported, so +// it is only valid inside the generated package. +func (d *FileDescriptor) VarName() string { return fmt.Sprintf("fileDescriptor%d", d.index) } + +// goPackageOption interprets the file's go_package option. +// If there is no go_package, it returns ("", "", false). 
+// If there's a simple name, it returns ("", pkg, true). +// If the option implies an import path, it returns (impPath, pkg, true). +func (d *FileDescriptor) goPackageOption() (impPath, pkg string, ok bool) { + pkg = d.GetOptions().GetGoPackage() + if pkg == "" { + return + } + ok = true + // The presence of a slash implies there's an import path. + slash := strings.LastIndex(pkg, "/") + if slash < 0 { + return + } + impPath, pkg = pkg, pkg[slash+1:] + // A semicolon-delimited suffix overrides the package name. + sc := strings.IndexByte(impPath, ';') + if sc < 0 { + return + } + impPath, pkg = impPath[:sc], impPath[sc+1:] + return +} + +// goPackageName returns the Go package name to use in the +// generated Go file. The result explicit reports whether the name +// came from an option go_package statement. If explicit is false, +// the name was derived from the protocol buffer's package statement +// or the input file name. +func (d *FileDescriptor) goPackageName() (name string, explicit bool) { + // Does the file have a "go_package" option? + if _, pkg, ok := d.goPackageOption(); ok { + return pkg, true + } + + // Does the file have a package clause? + if pkg := d.GetPackage(); pkg != "" { + return pkg, false + } + // Use the file base name. + return baseName(d.GetName()), false +} + +// goFileName returns the output name for the generated Go file. +func (d *FileDescriptor) goFileName() string { + name := *d.Name + if ext := path.Ext(name); ext == ".proto" || ext == ".protodevel" { + name = name[:len(name)-len(ext)] + } + name += ".pb.go" + + // Does the file have a "go_package" option? + // If it does, it may override the filename. + if impPath, _, ok := d.goPackageOption(); ok && impPath != "" { + // Replace the existing dirname with the declared import path. + _, name = path.Split(name) + name = path.Join(impPath, name) + return name + } + + return name +} + +func (d *FileDescriptor) addExport(obj Object, sym symbol) { + d.exported[obj] = append(d.exported[obj], sym) +} + +// symbol is an interface representing an exported Go symbol. +type symbol interface { + // GenerateAlias should generate an appropriate alias + // for the symbol from the named package. + GenerateAlias(g *Generator, pkg string) +} + +type messageSymbol struct { + sym string + hasExtensions, isMessageSet bool + hasOneof bool + getters []getterSymbol +} + +type getterSymbol struct { + name string + typ string + typeName string // canonical name in proto world; empty for proto.Message and similar + genType bool // whether typ contains a generated type (message/group/enum) +} + +func (ms *messageSymbol) GenerateAlias(g *Generator, pkg string) { + remoteSym := pkg + "." + ms.sym + + g.P("type ", ms.sym, " ", remoteSym) + g.P("func (m *", ms.sym, ") Reset() { (*", remoteSym, ")(m).Reset() }") + g.P("func (m *", ms.sym, ") String() string { return (*", remoteSym, ")(m).String() }") + g.P("func (*", ms.sym, ") ProtoMessage() {}") + if ms.hasExtensions { + g.P("func (*", ms.sym, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange ", + "{ return (*", remoteSym, ")(nil).ExtensionRangeArray() }") + if ms.isMessageSet { + g.P("func (m *", ms.sym, ") Marshal() ([]byte, error) ", + "{ return (*", remoteSym, ")(m).Marshal() }") + g.P("func (m *", ms.sym, ") Unmarshal(buf []byte) error ", + "{ return (*", remoteSym, ")(m).Unmarshal(buf) }") + } + } + if ms.hasOneof { + // Oneofs and public imports do not mix well. 
+ // We can make them work okay for the binary format, + // but they're going to break weirdly for text/JSON. + enc := "_" + ms.sym + "_OneofMarshaler" + dec := "_" + ms.sym + "_OneofUnmarshaler" + size := "_" + ms.sym + "_OneofSizer" + encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" + decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" + sizeSig := "(msg " + g.Pkg["proto"] + ".Message) int" + g.P("func (m *", ms.sym, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") + g.P("return ", enc, ", ", dec, ", ", size, ", nil") + g.P("}") + + g.P("func ", enc, encSig, " {") + g.P("m := msg.(*", ms.sym, ")") + g.P("m0 := (*", remoteSym, ")(m)") + g.P("enc, _, _, _ := m0.XXX_OneofFuncs()") + g.P("return enc(m0, b)") + g.P("}") + + g.P("func ", dec, decSig, " {") + g.P("m := msg.(*", ms.sym, ")") + g.P("m0 := (*", remoteSym, ")(m)") + g.P("_, dec, _, _ := m0.XXX_OneofFuncs()") + g.P("return dec(m0, tag, wire, b)") + g.P("}") + + g.P("func ", size, sizeSig, " {") + g.P("m := msg.(*", ms.sym, ")") + g.P("m0 := (*", remoteSym, ")(m)") + g.P("_, _, size, _ := m0.XXX_OneofFuncs()") + g.P("return size(m0)") + g.P("}") + } + for _, get := range ms.getters { + + if get.typeName != "" { + g.RecordTypeUse(get.typeName) + } + typ := get.typ + val := "(*" + remoteSym + ")(m)." + get.name + "()" + if get.genType { + // typ will be "*pkg.T" (message/group) or "pkg.T" (enum) + // or "map[t]*pkg.T" (map to message/enum). + // The first two of those might have a "[]" prefix if it is repeated. + // Drop any package qualifier since we have hoisted the type into this package. + rep := strings.HasPrefix(typ, "[]") + if rep { + typ = typ[2:] + } + isMap := strings.HasPrefix(typ, "map[") + star := typ[0] == '*' + if !isMap { // map types handled lower down + typ = typ[strings.Index(typ, ".")+1:] + } + if star { + typ = "*" + typ + } + if rep { + // Go does not permit conversion between slice types where both + // element types are named. That means we need to generate a bit + // of code in this situation. + // typ is the element type. + // val is the expression to get the slice from the imported type. + + ctyp := typ // conversion type expression; "Foo" or "(*Foo)" + if star { + ctyp = "(" + typ + ")" + } + + g.P("func (m *", ms.sym, ") ", get.name, "() []", typ, " {") + g.In() + g.P("o := ", val) + g.P("if o == nil {") + g.In() + g.P("return nil") + g.Out() + g.P("}") + g.P("s := make([]", typ, ", len(o))") + g.P("for i, x := range o {") + g.In() + g.P("s[i] = ", ctyp, "(x)") + g.Out() + g.P("}") + g.P("return s") + g.Out() + g.P("}") + continue + } + if isMap { + // Split map[keyTyp]valTyp. + bra, ket := strings.Index(typ, "["), strings.Index(typ, "]") + keyTyp, valTyp := typ[bra+1:ket], typ[ket+1:] + // Drop any package qualifier. + // Only the value type may be foreign. + star := valTyp[0] == '*' + valTyp = valTyp[strings.Index(valTyp, ".")+1:] + if star { + valTyp = "*" + valTyp + } + + typ := "map[" + keyTyp + "]" + valTyp + g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " {") + g.P("o := ", val) + g.P("if o == nil { return nil }") + g.P("s := make(", typ, ", len(o))") + g.P("for k, v := range o {") + g.P("s[k] = (", valTyp, ")(v)") + g.P("}") + g.P("return s") + g.P("}") + continue + } + // Convert imported type into the forwarding type. 
+ val = "(" + typ + ")(" + val + ")" + } + + g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " { return ", val, " }") + } + +} + +type enumSymbol struct { + name string + proto3 bool // Whether this came from a proto3 file. +} + +func (es enumSymbol) GenerateAlias(g *Generator, pkg string) { + s := es.name + g.P("type ", s, " ", pkg, ".", s) + g.P("var ", s, "_name = ", pkg, ".", s, "_name") + g.P("var ", s, "_value = ", pkg, ".", s, "_value") + g.P("func (x ", s, ") String() string { return (", pkg, ".", s, ")(x).String() }") + if !es.proto3 { + g.P("func (x ", s, ") Enum() *", s, "{ return (*", s, ")((", pkg, ".", s, ")(x).Enum()) }") + g.P("func (x *", s, ") UnmarshalJSON(data []byte) error { return (*", pkg, ".", s, ")(x).UnmarshalJSON(data) }") + } +} + +type constOrVarSymbol struct { + sym string + typ string // either "const" or "var" + cast string // if non-empty, a type cast is required (used for enums) +} + +func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { + v := pkg + "." + cs.sym + if cs.cast != "" { + v = cs.cast + "(" + v + ")" + } + g.P(cs.typ, " ", cs.sym, " = ", v) +} + +// Object is an interface abstracting the abilities shared by enums, messages, extensions and imported objects. +type Object interface { + PackageName() string // The name we use in our output (a_b_c), possibly renamed for uniqueness. + TypeName() []string + File() *descriptor.FileDescriptorProto +} + +// Each package name we generate must be unique. The package we're generating +// gets its own name but every other package must have a unique name that does +// not conflict in the code we generate. These names are chosen globally (although +// they don't have to be, it simplifies things to do them globally). +func uniquePackageOf(fd *descriptor.FileDescriptorProto) string { + s, ok := uniquePackageName[fd] + if !ok { + log.Fatal("internal error: no package name defined for " + fd.GetName()) + } + return s +} + +// Generator is the type whose methods generate the output, stored in the associated response structure. +type Generator struct { + *bytes.Buffer + + Request *plugin.CodeGeneratorRequest // The input. + Response *plugin.CodeGeneratorResponse // The output. + + Param map[string]string // Command-line parameters. + PackageImportPath string // Go import path of the package we're generating code for + ImportPrefix string // String to prefix to imported package file names. + ImportMap map[string]string // Mapping from .proto file name to import path + + Pkg map[string]string // The names under which we import support packages + + packageName string // What we're calling ourselves. + allFiles []*FileDescriptor // All files in the tree + allFilesByName map[string]*FileDescriptor // All files by filename. + genFiles []*FileDescriptor // Those files we will generate output for. + file *FileDescriptor // The file we are compiling now. + usedPackages map[string]bool // Names of packages used in current file. + typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. + init []string // Lines to emit in the init function. + indent string + writeOutput bool +} + +// New creates a new generator and allocates the request and response protobufs. +func New() *Generator { + g := new(Generator) + g.Buffer = new(bytes.Buffer) + g.Request = new(plugin.CodeGeneratorRequest) + g.Response = new(plugin.CodeGeneratorResponse) + return g +} + +// Error reports a problem, including an error, and exits the program. 
+func (g *Generator) Error(err error, msgs ...string) { + s := strings.Join(msgs, " ") + ":" + err.Error() + log.Print("protoc-gen-go: error:", s) + os.Exit(1) +} + +// Fail reports a problem and exits the program. +func (g *Generator) Fail(msgs ...string) { + s := strings.Join(msgs, " ") + log.Print("protoc-gen-go: error:", s) + os.Exit(1) +} + +// CommandLineParameters breaks the comma-separated list of key=value pairs +// in the parameter (a member of the request protobuf) into a key/value map. +// It then sets file name mappings defined by those entries. +func (g *Generator) CommandLineParameters(parameter string) { + g.Param = make(map[string]string) + for _, p := range strings.Split(parameter, ",") { + if i := strings.Index(p, "="); i < 0 { + g.Param[p] = "" + } else { + g.Param[p[0:i]] = p[i+1:] + } + } + + g.ImportMap = make(map[string]string) + pluginList := "none" // Default list of plugin names to enable (empty means all). + for k, v := range g.Param { + switch k { + case "import_prefix": + g.ImportPrefix = v + case "import_path": + g.PackageImportPath = v + case "plugins": + pluginList = v + default: + if len(k) > 0 && k[0] == 'M' { + g.ImportMap[k[1:]] = v + } + } + } + if pluginList != "" { + // Amend the set of plugins. + enabled := make(map[string]bool) + for _, name := range strings.Split(pluginList, "+") { + enabled[name] = true + } + var nplugins []Plugin + for _, p := range plugins { + if enabled[p.Name()] { + nplugins = append(nplugins, p) + } + } + plugins = nplugins + } +} + +// DefaultPackageName returns the package name printed for the object. +// If its file is in a different package, it returns the package name we're using for this file, plus ".". +// Otherwise it returns the empty string. +func (g *Generator) DefaultPackageName(obj Object) string { + pkg := obj.PackageName() + if pkg == g.packageName { + return "" + } + return pkg + "." +} + +// For each input file, the unique package name to use, underscored. +var uniquePackageName = make(map[*descriptor.FileDescriptorProto]string) + +// Package names already registered. Key is the name from the .proto file; +// value is the name that appears in the generated code. +var pkgNamesInUse = make(map[string]bool) + +// Create and remember a guaranteed unique package name for this file descriptor. +// Pkg is the candidate name. If f is nil, it's a builtin package like "proto" and +// has no file descriptor. +func RegisterUniquePackageName(pkg string, f *FileDescriptor) string { + // Convert dots to underscores before finding a unique alias. + pkg = strings.Map(badToUnderscore, pkg) + + for i, orig := 1, pkg; pkgNamesInUse[pkg]; i++ { + // It's a duplicate; must rename. + pkg = orig + strconv.Itoa(i) + } + // Install it. + pkgNamesInUse[pkg] = true + if f != nil { + uniquePackageName[f.FileDescriptorProto] = pkg + } + return pkg +} + +var isGoKeyword = map[string]bool{ + "break": true, + "case": true, + "chan": true, + "const": true, + "continue": true, + "default": true, + "else": true, + "defer": true, + "fallthrough": true, + "for": true, + "func": true, + "go": true, + "goto": true, + "if": true, + "import": true, + "interface": true, + "map": true, + "package": true, + "range": true, + "return": true, + "select": true, + "struct": true, + "switch": true, + "type": true, + "var": true, +} + +// defaultGoPackage returns the package name to use, +// derived from the import path of the package we're building code for. 
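A standalone sketch, illustrative only and not part of this file, of the sanitization defaultGoPackage performs below. It assumes badToUnderscore (defined elsewhere in this file) maps any non-identifier character to an underscore; the helper name is hypothetical and a few literals stand in for the isGoKeyword table above:

	package main

	import (
		"fmt"
		"strings"
		"unicode"
		"unicode/utf8"
	)

	// goPackageFromImportPath mirrors defaultGoPackage: keep the last path
	// element, replace non-identifier characters with '_', and prefix '_'
	// when the result is a Go keyword or starts with a digit.
	func goPackageFromImportPath(p string) string {
		if i := strings.LastIndex(p, "/"); i >= 0 {
			p = p[i+1:]
		}
		p = strings.Map(func(r rune) rune {
			if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' {
				return r
			}
			return '_'
		}, p)
		if p == "type" || p == "func" || p == "map" { // stand-in for the isGoKeyword table
			p = "_" + p
		}
		if r, _ := utf8.DecodeRuneInString(p); unicode.IsDigit(r) {
			p = "_" + p
		}
		return p
	}

	func main() {
		fmt.Println(goPackageFromImportPath("github.com/example/my-app")) // my_app
		fmt.Println(goPackageFromImportPath("example.com/type"))          // _type
		fmt.Println(goPackageFromImportPath("example.com/9lives"))        // _9lives
	}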
+func (g *Generator) defaultGoPackage() string { + p := g.PackageImportPath + if i := strings.LastIndex(p, "/"); i >= 0 { + p = p[i+1:] + } + if p == "" { + return "" + } + + p = strings.Map(badToUnderscore, p) + // Identifier must not be keyword: insert _. + if isGoKeyword[p] { + p = "_" + p + } + // Identifier must not begin with digit: insert _. + if r, _ := utf8.DecodeRuneInString(p); unicode.IsDigit(r) { + p = "_" + p + } + return p +} + +// SetPackageNames sets the package name for this run. +// The package name must agree across all files being generated. +// It also defines unique package names for all imported files. +func (g *Generator) SetPackageNames() { + // Register the name for this package. It will be the first name + // registered so is guaranteed to be unmodified. + pkg, explicit := g.genFiles[0].goPackageName() + + // Check all files for an explicit go_package option. + for _, f := range g.genFiles { + thisPkg, thisExplicit := f.goPackageName() + if thisExplicit { + if !explicit { + // Let this file's go_package option serve for all input files. + pkg, explicit = thisPkg, true + } else if thisPkg != pkg { + g.Fail("inconsistent package names:", thisPkg, pkg) + } + } + } + + // If we don't have an explicit go_package option but we have an + // import path, use that. + if !explicit { + p := g.defaultGoPackage() + if p != "" { + pkg, explicit = p, true + } + } + + // If there was no go_package and no import path to use, + // double-check that all the inputs have the same implicit + // Go package name. + if !explicit { + for _, f := range g.genFiles { + thisPkg, _ := f.goPackageName() + if thisPkg != pkg { + g.Fail("inconsistent package names:", thisPkg, pkg) + } + } + } + + g.packageName = RegisterUniquePackageName(pkg, g.genFiles[0]) + + // Register the support package names. They might collide with the + // name of a package we import. + g.Pkg = map[string]string{ + "fmt": RegisterUniquePackageName("fmt", nil), + "math": RegisterUniquePackageName("math", nil), + "proto": RegisterUniquePackageName("proto", nil), + } + +AllFiles: + for _, f := range g.allFiles { + for _, genf := range g.genFiles { + if f == genf { + // In this package already. + uniquePackageName[f.FileDescriptorProto] = g.packageName + continue AllFiles + } + } + // The file is a dependency, so we want to ignore its go_package option + // because that is only relevant for its specific generated output. + pkg := f.GetPackage() + if pkg == "" { + pkg = baseName(*f.Name) + } + RegisterUniquePackageName(pkg, f) + } +} + +// WrapTypes walks the incoming data, wrapping DescriptorProtos, EnumDescriptorProtos +// and FileDescriptorProtos into file-referenced objects within the Generator. +// It also creates the list of files to generate and so should be called before GenerateAllFiles. 
+func (g *Generator) WrapTypes() { + g.allFiles = make([]*FileDescriptor, 0, len(g.Request.ProtoFile)) + g.allFilesByName = make(map[string]*FileDescriptor, len(g.allFiles)) + for _, f := range g.Request.ProtoFile { + // We must wrap the descriptors before we wrap the enums + descs := wrapDescriptors(f) + g.buildNestedDescriptors(descs) + enums := wrapEnumDescriptors(f, descs) + g.buildNestedEnums(descs, enums) + exts := wrapExtensions(f) + fd := &FileDescriptor{ + FileDescriptorProto: f, + desc: descs, + enum: enums, + ext: exts, + exported: make(map[Object][]symbol), + proto3: fileIsProto3(f), + } + extractComments(fd) + g.allFiles = append(g.allFiles, fd) + g.allFilesByName[f.GetName()] = fd + } + for _, fd := range g.allFiles { + fd.imp = wrapImported(fd.FileDescriptorProto, g) + } + + g.genFiles = make([]*FileDescriptor, 0, len(g.Request.FileToGenerate)) + for _, fileName := range g.Request.FileToGenerate { + fd := g.allFilesByName[fileName] + if fd == nil { + g.Fail("could not find file named", fileName) + } + fd.index = len(g.genFiles) + g.genFiles = append(g.genFiles, fd) + } +} + +// Scan the descriptors in this file. For each one, build the slice of nested descriptors +func (g *Generator) buildNestedDescriptors(descs []*Descriptor) { + for _, desc := range descs { + if len(desc.NestedType) != 0 { + for _, nest := range descs { + if nest.parent == desc { + desc.nested = append(desc.nested, nest) + } + } + if len(desc.nested) != len(desc.NestedType) { + g.Fail("internal error: nesting failure for", desc.GetName()) + } + } + } +} + +func (g *Generator) buildNestedEnums(descs []*Descriptor, enums []*EnumDescriptor) { + for _, desc := range descs { + if len(desc.EnumType) != 0 { + for _, enum := range enums { + if enum.parent == desc { + desc.enums = append(desc.enums, enum) + } + } + if len(desc.enums) != len(desc.EnumType) { + g.Fail("internal error: enum nesting failure for", desc.GetName()) + } + } + } +} + +// Construct the Descriptor +func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *Descriptor { + d := &Descriptor{ + common: common{file}, + DescriptorProto: desc, + parent: parent, + index: index, + } + if parent == nil { + d.path = fmt.Sprintf("%d,%d", messagePath, index) + } else { + d.path = fmt.Sprintf("%s,%d,%d", parent.path, messageMessagePath, index) + } + + // The only way to distinguish a group from a message is whether + // the containing message has a TYPE_GROUP field that matches. + if parent != nil { + parts := d.TypeName() + if file.Package != nil { + parts = append([]string{*file.Package}, parts...) + } + exp := "." 
+ strings.Join(parts, ".") + for _, field := range parent.Field { + if field.GetType() == descriptor.FieldDescriptorProto_TYPE_GROUP && field.GetTypeName() == exp { + d.group = true + break + } + } + } + + for _, field := range desc.Extension { + d.ext = append(d.ext, &ExtensionDescriptor{common{file}, field, d}) + } + + return d +} + +// Return a slice of all the Descriptors defined within this file +func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { + sl := make([]*Descriptor, 0, len(file.MessageType)+10) + for i, desc := range file.MessageType { + sl = wrapThisDescriptor(sl, desc, nil, file, i) + } + return sl +} + +// Wrap this Descriptor, recursively +func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) []*Descriptor { + sl = append(sl, newDescriptor(desc, parent, file, index)) + me := sl[len(sl)-1] + for i, nested := range desc.NestedType { + sl = wrapThisDescriptor(sl, nested, me, file, i) + } + return sl +} + +// Construct the EnumDescriptor +func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *EnumDescriptor { + ed := &EnumDescriptor{ + common: common{file}, + EnumDescriptorProto: desc, + parent: parent, + index: index, + } + if parent == nil { + ed.path = fmt.Sprintf("%d,%d", enumPath, index) + } else { + ed.path = fmt.Sprintf("%s,%d,%d", parent.path, messageEnumPath, index) + } + return ed +} + +// Return a slice of all the EnumDescriptors defined within this file +func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descriptor) []*EnumDescriptor { + sl := make([]*EnumDescriptor, 0, len(file.EnumType)+10) + // Top-level enums. + for i, enum := range file.EnumType { + sl = append(sl, newEnumDescriptor(enum, nil, file, i)) + } + // Enums within messages. Enums within embedded messages appear in the outer-most message. + for _, nested := range descs { + for i, enum := range nested.EnumType { + sl = append(sl, newEnumDescriptor(enum, nested, file, i)) + } + } + return sl +} + +// Return a slice of all the top-level ExtensionDescriptors defined within this file. +func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor { + var sl []*ExtensionDescriptor + for _, field := range file.Extension { + sl = append(sl, &ExtensionDescriptor{common{file}, field, nil}) + } + return sl +} + +// Return a slice of all the types that are publicly imported into this file. +func wrapImported(file *descriptor.FileDescriptorProto, g *Generator) (sl []*ImportedDescriptor) { + for _, index := range file.PublicDependency { + df := g.fileByName(file.Dependency[index]) + for _, d := range df.desc { + if d.GetOptions().GetMapEntry() { + continue + } + sl = append(sl, &ImportedDescriptor{common{file}, d}) + } + for _, e := range df.enum { + sl = append(sl, &ImportedDescriptor{common{file}, e}) + } + for _, ext := range df.ext { + sl = append(sl, &ImportedDescriptor{common{file}, ext}) + } + } + return +} + +func extractComments(file *FileDescriptor) { + file.comments = make(map[string]*descriptor.SourceCodeInfo_Location) + for _, loc := range file.GetSourceCodeInfo().GetLocation() { + if loc.LeadingComments == nil { + continue + } + var p []string + for _, n := range loc.Path { + p = append(p, strconv.Itoa(int(n))) + } + file.comments[strings.Join(p, ",")] = loc + } +} + +// BuildTypeNameMap builds the map from fully qualified type names to objects. 
+// The key names for the map come from the input data, which puts a period at the beginning. +// It should be called after SetPackageNames and before GenerateAllFiles. +func (g *Generator) BuildTypeNameMap() { + g.typeNameToObject = make(map[string]Object) + for _, f := range g.allFiles { + // The names in this loop are defined by the proto world, not us, so the + // package name may be empty. If so, the dotted package name of X will + // be ".X"; otherwise it will be ".pkg.X". + dottedPkg := "." + f.GetPackage() + if dottedPkg != "." { + dottedPkg += "." + } + for _, enum := range f.enum { + name := dottedPkg + dottedSlice(enum.TypeName()) + g.typeNameToObject[name] = enum + } + for _, desc := range f.desc { + name := dottedPkg + dottedSlice(desc.TypeName()) + g.typeNameToObject[name] = desc + } + } +} + +// ObjectNamed, given a fully-qualified input type name as it appears in the input data, +// returns the descriptor for the message or enum with that name. +func (g *Generator) ObjectNamed(typeName string) Object { + o, ok := g.typeNameToObject[typeName] + if !ok { + g.Fail("can't find object with type", typeName) + } + + // If the file of this object isn't a direct dependency of the current file, + // or in the current file, then this object has been publicly imported into + // a dependency of the current file. + // We should return the ImportedDescriptor object for it instead. + direct := *o.File().Name == *g.file.Name + if !direct { + for _, dep := range g.file.Dependency { + if *g.fileByName(dep).Name == *o.File().Name { + direct = true + break + } + } + } + if !direct { + found := false + Loop: + for _, dep := range g.file.Dependency { + df := g.fileByName(*g.fileByName(dep).Name) + for _, td := range df.imp { + if td.o == o { + // Found it! + o = td + found = true + break Loop + } + } + } + if !found { + log.Printf("protoc-gen-go: WARNING: failed finding publicly imported dependency for %v, used in %v", typeName, *g.file.Name) + } + } + + return o +} + +// P prints the arguments to the generated output. It handles strings and int32s, plus +// handling indirections because they may be *string, etc. +func (g *Generator) P(str ...interface{}) { + if !g.writeOutput { + return + } + g.WriteString(g.indent) + for _, v := range str { + switch s := v.(type) { + case string: + g.WriteString(s) + case *string: + g.WriteString(*s) + case bool: + fmt.Fprintf(g, "%t", s) + case *bool: + fmt.Fprintf(g, "%t", *s) + case int: + fmt.Fprintf(g, "%d", s) + case *int32: + fmt.Fprintf(g, "%d", *s) + case *int64: + fmt.Fprintf(g, "%d", *s) + case float64: + fmt.Fprintf(g, "%g", s) + case *float64: + fmt.Fprintf(g, "%g", *s) + default: + g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) + } + } + g.WriteByte('\n') +} + +// addInitf stores the given statement to be printed inside the file's init function. +// The statement is given as a format specifier and arguments. +func (g *Generator) addInitf(stmt string, a ...interface{}) { + g.init = append(g.init, fmt.Sprintf(stmt, a...)) +} + +// In Indents the output one tab stop. +func (g *Generator) In() { g.indent += "\t" } + +// Out unindents the output one tab stop. +func (g *Generator) Out() { + if len(g.indent) > 0 { + g.indent = g.indent[1:] + } +} + +// GenerateAllFiles generates the output for all the files we're outputting. +func (g *Generator) GenerateAllFiles() { + // Initialize the plugins + for _, p := range plugins { + p.Init(g) + } + // Generate the output. 
The generator runs for every file, even the files + // that we don't generate output for, so that we can collate the full list + // of exported symbols to support public imports. + genFileMap := make(map[*FileDescriptor]bool, len(g.genFiles)) + for _, file := range g.genFiles { + genFileMap[file] = true + } + for _, file := range g.allFiles { + g.Reset() + g.writeOutput = genFileMap[file] + g.generate(file) + if !g.writeOutput { + continue + } + g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ + Name: proto.String(file.goFileName()), + Content: proto.String(g.String()), + }) + } +} + +// Run all the plugins associated with the file. +func (g *Generator) runPlugins(file *FileDescriptor) { + for _, p := range plugins { + p.Generate(file) + } +} + +// FileOf return the FileDescriptor for this FileDescriptorProto. +func (g *Generator) FileOf(fd *descriptor.FileDescriptorProto) *FileDescriptor { + for _, file := range g.allFiles { + if file.FileDescriptorProto == fd { + return file + } + } + g.Fail("could not find file in table:", fd.GetName()) + return nil +} + +// Fill the response protocol buffer with the generated output for all the files we're +// supposed to generate. +func (g *Generator) generate(file *FileDescriptor) { + g.file = g.FileOf(file.FileDescriptorProto) + g.usedPackages = make(map[string]bool) + + if g.file.index == 0 { + // For one file in the package, assert version compatibility. + g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the proto package it is being compiled against.") + g.P("// A compilation error at this line likely means your copy of the") + g.P("// proto package needs to be updated.") + g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") + g.P() + } + for _, td := range g.file.imp { + g.generateImported(td) + } + for _, enum := range g.file.enum { + g.generateEnum(enum) + } + for _, desc := range g.file.desc { + // Don't generate virtual messages for maps. + if desc.GetOptions().GetMapEntry() { + continue + } + g.generateMessage(desc) + } + for _, ext := range g.file.ext { + g.generateExtension(ext) + } + g.generateInitFunction() + + // Run the plugins before the imports so we know which imports are necessary. + g.runPlugins(file) + + g.generateFileDescriptor(file) + + // Generate header and imports last, though they appear first in the output. + rem := g.Buffer + g.Buffer = new(bytes.Buffer) + g.generateHeader() + g.generateImports() + if !g.writeOutput { + return + } + g.Write(rem.Bytes()) + + // Reformat generated code. + fset := token.NewFileSet() + raw := g.Bytes() + ast, err := parser.ParseFile(fset, "", g, parser.ParseComments) + if err != nil { + // Print out the bad code with line numbers. + // This should never happen in practice, but it can while changing generated code, + // so consider this a debugging aid. 
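// The numbered listing built below pairs every generated line with its line
// number, so the position reported in the parse error can be matched against
// the offending generated code.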
+ var src bytes.Buffer + s := bufio.NewScanner(bytes.NewReader(raw)) + for line := 1; s.Scan(); line++ { + fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes()) + } + g.Fail("bad Go source code was generated:", err.Error(), "\n"+src.String()) + } + g.Reset() + err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, ast) + if err != nil { + g.Fail("generated Go source code could not be reformatted:", err.Error()) + } +} + +// Generate the header, including package definition +func (g *Generator) generateHeader() { + g.P("// Code generated by protoc-gen-go. DO NOT EDIT.") + g.P("// source: ", g.file.Name) + g.P() + + name := g.file.PackageName() + + if g.file.index == 0 { + // Generate package docs for the first file in the package. + g.P("/*") + g.P("Package ", name, " is a generated protocol buffer package.") + g.P() + if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok { + // not using g.PrintComments because this is a /* */ comment block. + text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") + for _, line := range strings.Split(text, "\n") { + line = strings.TrimPrefix(line, " ") + // ensure we don't escape from the block comment + line = strings.Replace(line, "*/", "* /", -1) + g.P(line) + } + g.P() + } + var topMsgs []string + g.P("It is generated from these files:") + for _, f := range g.genFiles { + g.P("\t", f.Name) + for _, msg := range f.desc { + if msg.parent != nil { + continue + } + topMsgs = append(topMsgs, CamelCaseSlice(msg.TypeName())) + } + } + g.P() + g.P("It has these top-level messages:") + for _, msg := range topMsgs { + g.P("\t", msg) + } + g.P("*/") + } + + g.P("package ", name) + g.P() +} + +// PrintComments prints any comments from the source .proto file. +// The path is a comma-separated list of integers. +// It returns an indication of whether any comments were printed. +// See descriptor.proto for its format. +func (g *Generator) PrintComments(path string) bool { + if !g.writeOutput { + return false + } + if loc, ok := g.file.comments[path]; ok { + text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") + for _, line := range strings.Split(text, "\n") { + g.P("// ", strings.TrimPrefix(line, " ")) + } + return true + } + return false +} + +func (g *Generator) fileByName(filename string) *FileDescriptor { + return g.allFilesByName[filename] +} + +// weak returns whether the ith import of the current file is a weak import. +func (g *Generator) weak(i int32) bool { + for _, j := range g.file.WeakDependency { + if j == i { + return true + } + } + return false +} + +// Generate the imports +func (g *Generator) generateImports() { + // We almost always need a proto import. Rather than computing when we + // do, which is tricky when there's a plugin, just import it and + // reference it later. The same argument applies to the fmt and math packages. + g.P("import " + g.Pkg["proto"] + " " + strconv.Quote(g.ImportPrefix+"github.com/golang/protobuf/proto")) + g.P("import " + g.Pkg["fmt"] + ` "fmt"`) + g.P("import " + g.Pkg["math"] + ` "math"`) + for i, s := range g.file.Dependency { + fd := g.fileByName(s) + // Do not import our own package. + if fd.PackageName() == g.packageName { + continue + } + filename := fd.goFileName() + // By default, import path is the dirname of the Go filename. + importPath := path.Dir(filename) + if substitution, ok := g.ImportMap[s]; ok { + importPath = substitution + } + importPath = g.ImportPrefix + importPath + // Skip weak imports. 
+ if g.weak(int32(i)) { + g.P("// skipping weak import ", fd.PackageName(), " ", strconv.Quote(importPath)) + continue + } + // We need to import all the dependencies, even if we don't reference them, + // because other code and tools depend on having the full transitive closure + // of protocol buffer types in the binary. + pname := fd.PackageName() + if _, ok := g.usedPackages[pname]; !ok { + pname = "_" + } + g.P("import ", pname, " ", strconv.Quote(importPath)) + } + g.P() + // TODO: may need to worry about uniqueness across plugins + for _, p := range plugins { + p.GenerateImports(g.file) + g.P() + } + g.P("// Reference imports to suppress errors if they are not otherwise used.") + g.P("var _ = ", g.Pkg["proto"], ".Marshal") + g.P("var _ = ", g.Pkg["fmt"], ".Errorf") + g.P("var _ = ", g.Pkg["math"], ".Inf") + g.P() +} + +func (g *Generator) generateImported(id *ImportedDescriptor) { + // Don't generate public import symbols for files that we are generating + // code for, since those symbols will already be in this package. + // We can't simply avoid creating the ImportedDescriptor objects, + // because g.genFiles isn't populated at that stage. + tn := id.TypeName() + sn := tn[len(tn)-1] + df := g.FileOf(id.o.File()) + filename := *df.Name + for _, fd := range g.genFiles { + if *fd.Name == filename { + g.P("// Ignoring public import of ", sn, " from ", filename) + g.P() + return + } + } + g.P("// ", sn, " from public import ", filename) + g.usedPackages[df.PackageName()] = true + + for _, sym := range df.exported[id.o] { + sym.GenerateAlias(g, df.PackageName()) + } + + g.P() +} + +// Generate the enum definitions for this EnumDescriptor. +func (g *Generator) generateEnum(enum *EnumDescriptor) { + // The full type name + typeName := enum.TypeName() + // The full type name, CamelCased. 
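// For a nested enum the name includes the enclosing message names joined by
// underscores: an enum Color declared inside message Outer becomes the Go
// type Outer_Color.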
+ ccTypeName := CamelCaseSlice(typeName) + ccPrefix := enum.prefix() + + g.PrintComments(enum.path) + g.P("type ", ccTypeName, " int32") + g.file.addExport(enum, enumSymbol{ccTypeName, enum.proto3()}) + g.P("const (") + g.In() + for i, e := range enum.Value { + g.PrintComments(fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i)) + + name := ccPrefix + *e.Name + g.P(name, " ", ccTypeName, " = ", e.Number) + g.file.addExport(enum, constOrVarSymbol{name, "const", ccTypeName}) + } + g.Out() + g.P(")") + g.P("var ", ccTypeName, "_name = map[int32]string{") + g.In() + generated := make(map[int32]bool) // avoid duplicate values + for _, e := range enum.Value { + duplicate := "" + if _, present := generated[*e.Number]; present { + duplicate = "// Duplicate value: " + } + g.P(duplicate, e.Number, ": ", strconv.Quote(*e.Name), ",") + generated[*e.Number] = true + } + g.Out() + g.P("}") + g.P("var ", ccTypeName, "_value = map[string]int32{") + g.In() + for _, e := range enum.Value { + g.P(strconv.Quote(*e.Name), ": ", e.Number, ",") + } + g.Out() + g.P("}") + + if !enum.proto3() { + g.P("func (x ", ccTypeName, ") Enum() *", ccTypeName, " {") + g.In() + g.P("p := new(", ccTypeName, ")") + g.P("*p = x") + g.P("return p") + g.Out() + g.P("}") + } + + g.P("func (x ", ccTypeName, ") String() string {") + g.In() + g.P("return ", g.Pkg["proto"], ".EnumName(", ccTypeName, "_name, int32(x))") + g.Out() + g.P("}") + + if !enum.proto3() { + g.P("func (x *", ccTypeName, ") UnmarshalJSON(data []byte) error {") + g.In() + g.P("value, err := ", g.Pkg["proto"], ".UnmarshalJSONEnum(", ccTypeName, `_value, data, "`, ccTypeName, `")`) + g.P("if err != nil {") + g.In() + g.P("return err") + g.Out() + g.P("}") + g.P("*x = ", ccTypeName, "(value)") + g.P("return nil") + g.Out() + g.P("}") + } + + var indexes []string + for m := enum.parent; m != nil; m = m.parent { + // XXX: skip groups? + indexes = append([]string{strconv.Itoa(m.index)}, indexes...) + } + indexes = append(indexes, strconv.Itoa(enum.index)) + g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" { + g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`) + } + + g.P() +} + +// The tag is a string like "varint,2,opt,name=fieldname,def=7" that +// identifies details of the field for the protocol buffer marshaling and unmarshaling +// code. The fields are: +// wire encoding +// protocol tag number +// opt,req,rep for optional, required, or repeated +// packed whether the encoding is "packed" (optional; repeated primitives only) +// name= the original declared name +// enum= the name of the enum type if it is an enum-typed field. +// proto3 if this field is in a proto3 message +// def= string representation of the default value, if any. +// The default value must be in a representation that can be used at run-time +// to generate the default value. Thus bools become 0 and 1, for instance. +func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptorProto, wiretype string) string { + optrepreq := "" + switch { + case isOptional(field): + optrepreq = "opt" + case isRequired(field): + optrepreq = "req" + case isRepeated(field): + optrepreq = "rep" + } + var defaultValue string + if dv := field.DefaultValue; dv != nil { // set means an explicit default + defaultValue = *dv + // Some types need tweaking. 
+ switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + if defaultValue == "true" { + defaultValue = "1" + } else { + defaultValue = "0" + } + case descriptor.FieldDescriptorProto_TYPE_STRING, + descriptor.FieldDescriptorProto_TYPE_BYTES: + // Nothing to do. Quoting is done for the whole tag. + case descriptor.FieldDescriptorProto_TYPE_ENUM: + // For enums we need to provide the integer constant. + obj := g.ObjectNamed(field.GetTypeName()) + if id, ok := obj.(*ImportedDescriptor); ok { + // It is an enum that was publicly imported. + // We need the underlying type. + obj = id.o + } + enum, ok := obj.(*EnumDescriptor) + if !ok { + log.Printf("obj is a %T", obj) + if id, ok := obj.(*ImportedDescriptor); ok { + log.Printf("id.o is a %T", id.o) + } + g.Fail("unknown enum type", CamelCaseSlice(obj.TypeName())) + } + defaultValue = enum.integerValueAsString(defaultValue) + } + defaultValue = ",def=" + defaultValue + } + enum := "" + if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM { + // We avoid using obj.PackageName(), because we want to use the + // original (proto-world) package name. + obj := g.ObjectNamed(field.GetTypeName()) + if id, ok := obj.(*ImportedDescriptor); ok { + obj = id.o + } + enum = ",enum=" + if pkg := obj.File().GetPackage(); pkg != "" { + enum += pkg + "." + } + enum += CamelCaseSlice(obj.TypeName()) + } + packed := "" + if (field.Options != nil && field.Options.GetPacked()) || + // Per https://developers.google.com/protocol-buffers/docs/proto3#simple: + // "In proto3, repeated fields of scalar numeric types use packed encoding by default." + (message.proto3() && (field.Options == nil || field.Options.Packed == nil) && + isRepeated(field) && isScalar(field)) { + packed = ",packed" + } + fieldName := field.GetName() + name := fieldName + if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { + // We must use the type name for groups instead of + // the field name to preserve capitalization. + // type_name in FieldDescriptorProto is fully-qualified, + // but we only want the local part. + name = *field.TypeName + if i := strings.LastIndex(name, "."); i >= 0 { + name = name[i+1:] + } + } + if json := field.GetJsonName(); json != "" && json != name { + // TODO: escaping might be needed, in which case + // perhaps this should be in its own "json" tag. + name += ",json=" + json + } + name = ",name=" + name + if message.proto3() { + // We only need the extra tag for []byte fields; + // no need to add noise for the others. + if *field.Type == descriptor.FieldDescriptorProto_TYPE_BYTES { + name += ",proto3" + } + + } + oneof := "" + if field.OneofIndex != nil { + oneof = ",oneof" + } + return strconv.Quote(fmt.Sprintf("%s,%d,%s%s%s%s%s%s", + wiretype, + field.GetNumber(), + optrepreq, + packed, + name, + enum, + oneof, + defaultValue)) +} + +func needsStar(typ descriptor.FieldDescriptorProto_Type) bool { + switch typ { + case descriptor.FieldDescriptorProto_TYPE_GROUP: + return false + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + return false + case descriptor.FieldDescriptorProto_TYPE_BYTES: + return false + } + return true +} + +// TypeName is the printed name appropriate for an item. If the object is in the current file, +// TypeName drops the package name and underscores the rest. +// Otherwise the object is from another package; and the result is the underscored +// package name followed by the item name. +// The result always has an initial capital. 
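// For example, a message Inner nested in message Outer prints as "Outer_Inner"
// when it is declared in the file being generated; if it comes from another
// package, the underscored unique package name and a dot are prepended, e.g.
// "other_pkg.Outer_Inner".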
+func (g *Generator) TypeName(obj Object) string { + return g.DefaultPackageName(obj) + CamelCaseSlice(obj.TypeName()) +} + +// TypeNameWithPackage is like TypeName, but always includes the package +// name even if the object is in our own package. +func (g *Generator) TypeNameWithPackage(obj Object) string { + return obj.PackageName() + CamelCaseSlice(obj.TypeName()) +} + +// GoType returns a string representing the type name, and the wire type +func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescriptorProto) (typ string, wire string) { + // TODO: Options. + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + typ, wire = "float64", "fixed64" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + typ, wire = "float32", "fixed32" + case descriptor.FieldDescriptorProto_TYPE_INT64: + typ, wire = "int64", "varint" + case descriptor.FieldDescriptorProto_TYPE_UINT64: + typ, wire = "uint64", "varint" + case descriptor.FieldDescriptorProto_TYPE_INT32: + typ, wire = "int32", "varint" + case descriptor.FieldDescriptorProto_TYPE_UINT32: + typ, wire = "uint32", "varint" + case descriptor.FieldDescriptorProto_TYPE_FIXED64: + typ, wire = "uint64", "fixed64" + case descriptor.FieldDescriptorProto_TYPE_FIXED32: + typ, wire = "uint32", "fixed32" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + typ, wire = "bool", "varint" + case descriptor.FieldDescriptorProto_TYPE_STRING: + typ, wire = "string", "bytes" + case descriptor.FieldDescriptorProto_TYPE_GROUP: + desc := g.ObjectNamed(field.GetTypeName()) + typ, wire = "*"+g.TypeName(desc), "group" + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + desc := g.ObjectNamed(field.GetTypeName()) + typ, wire = "*"+g.TypeName(desc), "bytes" + case descriptor.FieldDescriptorProto_TYPE_BYTES: + typ, wire = "[]byte", "bytes" + case descriptor.FieldDescriptorProto_TYPE_ENUM: + desc := g.ObjectNamed(field.GetTypeName()) + typ, wire = g.TypeName(desc), "varint" + case descriptor.FieldDescriptorProto_TYPE_SFIXED32: + typ, wire = "int32", "fixed32" + case descriptor.FieldDescriptorProto_TYPE_SFIXED64: + typ, wire = "int64", "fixed64" + case descriptor.FieldDescriptorProto_TYPE_SINT32: + typ, wire = "int32", "zigzag32" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + typ, wire = "int64", "zigzag64" + default: + g.Fail("unknown type for", field.GetName()) + } + if isRepeated(field) { + typ = "[]" + typ + } else if message != nil && message.proto3() { + return + } else if field.OneofIndex != nil && message != nil { + return + } else if needsStar(*field.Type) { + typ = "*" + typ + } + return +} + +func (g *Generator) RecordTypeUse(t string) { + if obj, ok := g.typeNameToObject[t]; ok { + // Call ObjectNamed to get the true object to record the use. + obj = g.ObjectNamed(t) + g.usedPackages[obj.PackageName()] = true + } +} + +// Method names that may be generated. Fields with these names get an +// underscore appended. Any change to this set is a potential incompatible +// API change because it changes generated field names. +var methodNames = [...]string{ + "Reset", + "String", + "ProtoMessage", + "Marshal", + "Unmarshal", + "ExtensionRangeArray", + "ExtensionMap", + "Descriptor", +} + +// Names of messages in the `google.protobuf` package for which +// we will generate XXX_WellKnownType methods. 
+var wellKnownTypes = map[string]bool{ + "Any": true, + "Duration": true, + "Empty": true, + "Struct": true, + "Timestamp": true, + + "Value": true, + "ListValue": true, + "DoubleValue": true, + "FloatValue": true, + "Int64Value": true, + "UInt64Value": true, + "Int32Value": true, + "UInt32Value": true, + "BoolValue": true, + "StringValue": true, + "BytesValue": true, +} + +// Generate the type and default constant definitions for this Descriptor. +func (g *Generator) generateMessage(message *Descriptor) { + // The full type name + typeName := message.TypeName() + // The full type name, CamelCased. + ccTypeName := CamelCaseSlice(typeName) + + usedNames := make(map[string]bool) + for _, n := range methodNames { + usedNames[n] = true + } + fieldNames := make(map[*descriptor.FieldDescriptorProto]string) + fieldGetterNames := make(map[*descriptor.FieldDescriptorProto]string) + fieldTypes := make(map[*descriptor.FieldDescriptorProto]string) + mapFieldTypes := make(map[*descriptor.FieldDescriptorProto]string) + + oneofFieldName := make(map[int32]string) // indexed by oneof_index field of FieldDescriptorProto + oneofDisc := make(map[int32]string) // name of discriminator method + oneofTypeName := make(map[*descriptor.FieldDescriptorProto]string) // without star + oneofInsertPoints := make(map[int32]int) // oneof_index => offset of g.Buffer + + g.PrintComments(message.path) + g.P("type ", ccTypeName, " struct {") + g.In() + + // allocNames finds a conflict-free variation of the given strings, + // consistently mutating their suffixes. + // It returns the same number of strings. + allocNames := func(ns ...string) []string { + Loop: + for { + for _, n := range ns { + if usedNames[n] { + for i := range ns { + ns[i] += "_" + } + continue Loop + } + } + for _, n := range ns { + usedNames[n] = true + } + return ns + } + } + + for i, field := range message.Field { + // Allocate the getter and the field at the same time so name + // collisions create field/method consistent names. + // TODO: This allocation occurs based on the order of the fields + // in the proto file, meaning that a change in the field + // ordering can change generated Method/Field names. + base := CamelCase(*field.Name) + ns := allocNames(base, "Get"+base) + fieldName, fieldGetterName := ns[0], ns[1] + typename, wiretype := g.GoType(message, field) + jsonName := *field.Name + tag := fmt.Sprintf("protobuf:%s json:%q", g.goTag(message, field, wiretype), jsonName+",omitempty") + + fieldNames[field] = fieldName + fieldGetterNames[field] = fieldGetterName + + oneof := field.OneofIndex != nil + if oneof && oneofFieldName[*field.OneofIndex] == "" { + odp := message.OneofDecl[int(*field.OneofIndex)] + fname := allocNames(CamelCase(odp.GetName()))[0] + + // This is the first field of a oneof we haven't seen before. + // Generate the union field. + com := g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex)) + if com { + g.P("//") + } + g.P("// Types that are valid to be assigned to ", fname, ":") + // Generate the rest of this comment later, + // when we've computed any disambiguation. 
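// The buffer offset recorded just below marks where the list of valid wrapper
// types for this oneof is spliced back in: the fix-up loop that runs after the
// struct definition is closed writes one comment line per wrapper type, once
// those type names have been disambiguated against nested messages and enums.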
+ oneofInsertPoints[*field.OneofIndex] = g.Buffer.Len() + + dname := "is" + ccTypeName + "_" + fname + oneofFieldName[*field.OneofIndex] = fname + oneofDisc[*field.OneofIndex] = dname + tag := `protobuf_oneof:"` + odp.GetName() + `"` + g.P(fname, " ", dname, " `", tag, "`") + } + + if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE { + desc := g.ObjectNamed(field.GetTypeName()) + if d, ok := desc.(*Descriptor); ok && d.GetOptions().GetMapEntry() { + // Figure out the Go types and tags for the key and value types. + keyField, valField := d.Field[0], d.Field[1] + keyType, keyWire := g.GoType(d, keyField) + valType, valWire := g.GoType(d, valField) + keyTag, valTag := g.goTag(d, keyField, keyWire), g.goTag(d, valField, valWire) + + // We don't use stars, except for message-typed values. + // Message and enum types are the only two possibly foreign types used in maps, + // so record their use. They are not permitted as map keys. + keyType = strings.TrimPrefix(keyType, "*") + switch *valField.Type { + case descriptor.FieldDescriptorProto_TYPE_ENUM: + valType = strings.TrimPrefix(valType, "*") + g.RecordTypeUse(valField.GetTypeName()) + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + g.RecordTypeUse(valField.GetTypeName()) + default: + valType = strings.TrimPrefix(valType, "*") + } + + typename = fmt.Sprintf("map[%s]%s", keyType, valType) + mapFieldTypes[field] = typename // record for the getter generation + + tag += fmt.Sprintf(" protobuf_key:%s protobuf_val:%s", keyTag, valTag) + } + } + + fieldTypes[field] = typename + + if oneof { + tname := ccTypeName + "_" + fieldName + // It is possible for this to collide with a message or enum + // nested in this message. Check for collisions. + for { + ok := true + for _, desc := range message.nested { + if CamelCaseSlice(desc.TypeName()) == tname { + ok = false + break + } + } + for _, enum := range message.enums { + if CamelCaseSlice(enum.TypeName()) == tname { + ok = false + break + } + } + if !ok { + tname += "_" + continue + } + break + } + + oneofTypeName[field] = tname + continue + } + + g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)) + g.P(fieldName, "\t", typename, "\t`", tag, "`") + g.RecordTypeUse(field.GetTypeName()) + } + if len(message.ExtensionRange) > 0 { + g.P(g.Pkg["proto"], ".XXX_InternalExtensions `json:\"-\"`") + } + if !message.proto3() { + g.P("XXX_unrecognized\t[]byte `json:\"-\"`") + } + g.Out() + g.P("}") + + // Update g.Buffer to list valid oneof types. + // We do this down here, after we've disambiguated the oneof type names. + // We go in reverse order of insertion point to avoid invalidating offsets. + for oi := int32(len(message.OneofDecl)); oi >= 0; oi-- { + ip := oneofInsertPoints[oi] + all := g.Buffer.Bytes() + rem := all[ip:] + g.Buffer = bytes.NewBuffer(all[:ip:ip]) // set cap so we don't scribble on rem + for _, field := range message.Field { + if field.OneofIndex == nil || *field.OneofIndex != oi { + continue + } + g.P("//\t*", oneofTypeName[field]) + } + g.Buffer.Write(rem) + } + + // Reset, String and ProtoMessage methods. + g.P("func (m *", ccTypeName, ") Reset() { *m = ", ccTypeName, "{} }") + g.P("func (m *", ccTypeName, ") String() string { return ", g.Pkg["proto"], ".CompactTextString(m) }") + g.P("func (*", ccTypeName, ") ProtoMessage() {}") + var indexes []string + for m := message; m != nil; m = m.parent { + indexes = append([]string{strconv.Itoa(m.index)}, indexes...) 
+ } + g.P("func (*", ccTypeName, ") Descriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + // TODO: Revisit the decision to use a XXX_WellKnownType method + // if we change proto.MessageName to work with multiple equivalents. + if message.file.GetPackage() == "google.protobuf" && wellKnownTypes[message.GetName()] { + g.P("func (*", ccTypeName, `) XXX_WellKnownType() string { return "`, message.GetName(), `" }`) + } + + // Extension support methods + var hasExtensions, isMessageSet bool + if len(message.ExtensionRange) > 0 { + hasExtensions = true + // message_set_wire_format only makes sense when extensions are defined. + if opts := message.Options; opts != nil && opts.GetMessageSetWireFormat() { + isMessageSet = true + g.P() + g.P("func (m *", ccTypeName, ") Marshal() ([]byte, error) {") + g.In() + g.P("return ", g.Pkg["proto"], ".MarshalMessageSet(&m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("func (m *", ccTypeName, ") Unmarshal(buf []byte) error {") + g.In() + g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSet(buf, &m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("func (m *", ccTypeName, ") MarshalJSON() ([]byte, error) {") + g.In() + g.P("return ", g.Pkg["proto"], ".MarshalMessageSetJSON(&m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("func (m *", ccTypeName, ") UnmarshalJSON(buf []byte) error {") + g.In() + g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("// ensure ", ccTypeName, " satisfies proto.Marshaler and proto.Unmarshaler") + g.P("var _ ", g.Pkg["proto"], ".Marshaler = (*", ccTypeName, ")(nil)") + g.P("var _ ", g.Pkg["proto"], ".Unmarshaler = (*", ccTypeName, ")(nil)") + } + + g.P() + g.P("var extRange_", ccTypeName, " = []", g.Pkg["proto"], ".ExtensionRange{") + g.In() + for _, r := range message.ExtensionRange { + end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends + g.P("{", r.Start, ", ", end, "},") + } + g.Out() + g.P("}") + g.P("func (*", ccTypeName, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange {") + g.In() + g.P("return extRange_", ccTypeName) + g.Out() + g.P("}") + } + + // Default constants + defNames := make(map[*descriptor.FieldDescriptorProto]string) + for _, field := range message.Field { + def := field.GetDefaultValue() + if def == "" { + continue + } + fieldname := "Default_" + ccTypeName + "_" + CamelCase(*field.Name) + defNames[field] = fieldname + typename, _ := g.GoType(message, field) + if typename[0] == '*' { + typename = typename[1:] + } + kind := "const " + switch { + case typename == "bool": + case typename == "string": + def = strconv.Quote(def) + case typename == "[]byte": + def = "[]byte(" + strconv.Quote(unescape(def)) + ")" + kind = "var " + case def == "inf", def == "-inf", def == "nan": + // These names are known to, and defined by, the protocol language. + switch def { + case "inf": + def = "math.Inf(1)" + case "-inf": + def = "math.Inf(-1)" + case "nan": + def = "math.NaN()" + } + if *field.Type == descriptor.FieldDescriptorProto_TYPE_FLOAT { + def = "float32(" + def + ")" + } + kind = "var " + case *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM: + // Must be an enum. Need to construct the prefixed name. + obj := g.ObjectNamed(field.GetTypeName()) + var enum *EnumDescriptor + if id, ok := obj.(*ImportedDescriptor); ok { + // The enum type has been publicly imported. 
+ enum, _ = id.o.(*EnumDescriptor) + } else { + enum, _ = obj.(*EnumDescriptor) + } + if enum == nil { + log.Printf("don't know how to generate constant for %s", fieldname) + continue + } + def = g.DefaultPackageName(obj) + enum.prefix() + def + } + g.P(kind, fieldname, " ", typename, " = ", def) + g.file.addExport(message, constOrVarSymbol{fieldname, kind, ""}) + } + g.P() + + // Oneof per-field types, discriminants and getters. + // + // Generate unexported named types for the discriminant interfaces. + // We shouldn't have to do this, but there was (~19 Aug 2015) a compiler/linker bug + // that was triggered by using anonymous interfaces here. + // TODO: Revisit this and consider reverting back to anonymous interfaces. + for oi := range message.OneofDecl { + dname := oneofDisc[int32(oi)] + g.P("type ", dname, " interface { ", dname, "() }") + } + g.P() + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + _, wiretype := g.GoType(message, field) + tag := "protobuf:" + g.goTag(message, field, wiretype) + g.P("type ", oneofTypeName[field], " struct{ ", fieldNames[field], " ", fieldTypes[field], " `", tag, "` }") + g.RecordTypeUse(field.GetTypeName()) + } + g.P() + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + g.P("func (*", oneofTypeName[field], ") ", oneofDisc[*field.OneofIndex], "() {}") + } + g.P() + for oi := range message.OneofDecl { + fname := oneofFieldName[int32(oi)] + g.P("func (m *", ccTypeName, ") Get", fname, "() ", oneofDisc[int32(oi)], " {") + g.P("if m != nil { return m.", fname, " }") + g.P("return nil") + g.P("}") + } + g.P() + + // Field getters + var getters []getterSymbol + for _, field := range message.Field { + oneof := field.OneofIndex != nil + + fname := fieldNames[field] + typename, _ := g.GoType(message, field) + if t, ok := mapFieldTypes[field]; ok { + typename = t + } + mname := fieldGetterNames[field] + star := "" + if needsStar(*field.Type) && typename[0] == '*' { + typename = typename[1:] + star = "*" + } + + // Only export getter symbols for basic types, + // and for messages and enums in the same package. + // Groups are not exported. + // Foreign types can't be hoisted through a public import because + // the importer may not already be importing the defining .proto. + // As an example, imagine we have an import tree like this: + // A.proto -> B.proto -> C.proto + // If A publicly imports B, we need to generate the getters from B in A's output, + // but if one such getter returns something from C then we cannot do that + // because A is not importing C already. + var getter, genType bool + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_GROUP: + getter = false + case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_ENUM: + // Only export getter if its return type is in this package. 
+ getter = g.ObjectNamed(field.GetTypeName()).PackageName() == message.PackageName() + genType = true + default: + getter = true + } + if getter { + getters = append(getters, getterSymbol{ + name: mname, + typ: typename, + typeName: field.GetTypeName(), + genType: genType, + }) + } + + g.P("func (m *", ccTypeName, ") "+mname+"() "+typename+" {") + g.In() + def, hasDef := defNames[field] + typeDefaultIsNil := false // whether this field type's default value is a literal nil unless specified + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BYTES: + typeDefaultIsNil = !hasDef + case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE: + typeDefaultIsNil = true + } + if isRepeated(field) { + typeDefaultIsNil = true + } + if typeDefaultIsNil && !oneof { + // A bytes field with no explicit default needs less generated code, + // as does a message or group field, or a repeated field. + g.P("if m != nil {") + g.In() + g.P("return m." + fname) + g.Out() + g.P("}") + g.P("return nil") + g.Out() + g.P("}") + g.P() + continue + } + if !oneof { + if message.proto3() { + g.P("if m != nil {") + } else { + g.P("if m != nil && m." + fname + " != nil {") + } + g.In() + g.P("return " + star + "m." + fname) + g.Out() + g.P("}") + } else { + uname := oneofFieldName[*field.OneofIndex] + tname := oneofTypeName[field] + g.P("if x, ok := m.Get", uname, "().(*", tname, "); ok {") + g.P("return x.", fname) + g.P("}") + } + if hasDef { + if *field.Type != descriptor.FieldDescriptorProto_TYPE_BYTES { + g.P("return " + def) + } else { + // The default is a []byte var. + // Make a copy when returning it to be safe. + g.P("return append([]byte(nil), ", def, "...)") + } + } else { + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + g.P("return false") + case descriptor.FieldDescriptorProto_TYPE_STRING: + g.P(`return ""`) + case descriptor.FieldDescriptorProto_TYPE_GROUP, + descriptor.FieldDescriptorProto_TYPE_MESSAGE, + descriptor.FieldDescriptorProto_TYPE_BYTES: + // This is only possible for oneof fields. + g.P("return nil") + case descriptor.FieldDescriptorProto_TYPE_ENUM: + // The default default for an enum is the first value in the enum, + // not zero. + obj := g.ObjectNamed(field.GetTypeName()) + var enum *EnumDescriptor + if id, ok := obj.(*ImportedDescriptor); ok { + // The enum type has been publicly imported. 
+ enum, _ = id.o.(*EnumDescriptor) + } else { + enum, _ = obj.(*EnumDescriptor) + } + if enum == nil { + log.Printf("don't know how to generate getter for %s", field.GetName()) + continue + } + if len(enum.Value) == 0 { + g.P("return 0 // empty enum") + } else { + first := enum.Value[0].GetName() + g.P("return ", g.DefaultPackageName(obj)+enum.prefix()+first) + } + default: + g.P("return 0") + } + } + g.Out() + g.P("}") + g.P() + } + + if !message.group { + ms := &messageSymbol{ + sym: ccTypeName, + hasExtensions: hasExtensions, + isMessageSet: isMessageSet, + hasOneof: len(message.OneofDecl) > 0, + getters: getters, + } + g.file.addExport(message, ms) + } + + // Oneof functions + if len(message.OneofDecl) > 0 { + fieldWire := make(map[*descriptor.FieldDescriptorProto]string) + + // method + enc := "_" + ccTypeName + "_OneofMarshaler" + dec := "_" + ccTypeName + "_OneofUnmarshaler" + size := "_" + ccTypeName + "_OneofSizer" + encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" + decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" + sizeSig := "(msg " + g.Pkg["proto"] + ".Message) (n int)" + + g.P("// XXX_OneofFuncs is for the internal use of the proto package.") + g.P("func (*", ccTypeName, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") + g.P("return ", enc, ", ", dec, ", ", size, ", []interface{}{") + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + g.P("(*", oneofTypeName[field], ")(nil),") + } + g.P("}") + g.P("}") + g.P() + + // marshaler + g.P("func ", enc, encSig, " {") + g.P("m := msg.(*", ccTypeName, ")") + for oi, odp := range message.OneofDecl { + g.P("// ", odp.GetName()) + fname := oneofFieldName[int32(oi)] + g.P("switch x := m.", fname, ".(type) {") + for _, field := range message.Field { + if field.OneofIndex == nil || int(*field.OneofIndex) != oi { + continue + } + g.P("case *", oneofTypeName[field], ":") + var wire, pre, post string + val := "x." + fieldNames[field] // overridden for TYPE_BOOL + canFail := false // only TYPE_MESSAGE and TYPE_GROUP can fail + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + wire = "WireFixed64" + pre = "b.EncodeFixed64(" + g.Pkg["math"] + ".Float64bits(" + post = "))" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + wire = "WireFixed32" + pre = "b.EncodeFixed32(uint64(" + g.Pkg["math"] + ".Float32bits(" + post = ")))" + case descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64: + wire = "WireVarint" + pre, post = "b.EncodeVarint(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_ENUM: + wire = "WireVarint" + pre, post = "b.EncodeVarint(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_SFIXED64: + wire = "WireFixed64" + pre, post = "b.EncodeFixed64(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED32: + wire = "WireFixed32" + pre, post = "b.EncodeFixed32(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + // bool needs special handling. 
+ g.P("t := uint64(0)") + g.P("if ", val, " { t = 1 }") + val = "t" + wire = "WireVarint" + pre, post = "b.EncodeVarint(", ")" + case descriptor.FieldDescriptorProto_TYPE_STRING: + wire = "WireBytes" + pre, post = "b.EncodeStringBytes(", ")" + case descriptor.FieldDescriptorProto_TYPE_GROUP: + wire = "WireStartGroup" + pre, post = "b.Marshal(", ")" + canFail = true + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + wire = "WireBytes" + pre, post = "b.EncodeMessage(", ")" + canFail = true + case descriptor.FieldDescriptorProto_TYPE_BYTES: + wire = "WireBytes" + pre, post = "b.EncodeRawBytes(", ")" + case descriptor.FieldDescriptorProto_TYPE_SINT32: + wire = "WireVarint" + pre, post = "b.EncodeZigzag32(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + wire = "WireVarint" + pre, post = "b.EncodeZigzag64(uint64(", "))" + default: + g.Fail("unhandled oneof field type ", field.Type.String()) + } + fieldWire[field] = wire + g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") + if !canFail { + g.P(pre, val, post) + } else { + g.P("if err := ", pre, val, post, "; err != nil {") + g.P("return err") + g.P("}") + } + if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { + g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + } + } + g.P("case nil:") + g.P("default: return ", g.Pkg["fmt"], `.Errorf("`, ccTypeName, ".", fname, ` has unexpected type %T", x)`) + g.P("}") + } + g.P("return nil") + g.P("}") + g.P() + + // unmarshaler + g.P("func ", dec, decSig, " {") + g.P("m := msg.(*", ccTypeName, ")") + g.P("switch tag {") + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + odp := message.OneofDecl[int(*field.OneofIndex)] + g.P("case ", field.Number, ": // ", odp.GetName(), ".", *field.Name) + g.P("if wire != ", g.Pkg["proto"], ".", fieldWire[field], " {") + g.P("return true, ", g.Pkg["proto"], ".ErrInternalBadWireType") + g.P("}") + lhs := "x, err" // overridden for TYPE_MESSAGE and TYPE_GROUP + var dec, cast, cast2 string + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + dec, cast = "b.DecodeFixed64()", g.Pkg["math"]+".Float64frombits" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + dec, cast, cast2 = "b.DecodeFixed32()", "uint32", g.Pkg["math"]+".Float32frombits" + case descriptor.FieldDescriptorProto_TYPE_INT64: + dec, cast = "b.DecodeVarint()", "int64" + case descriptor.FieldDescriptorProto_TYPE_UINT64: + dec = "b.DecodeVarint()" + case descriptor.FieldDescriptorProto_TYPE_INT32: + dec, cast = "b.DecodeVarint()", "int32" + case descriptor.FieldDescriptorProto_TYPE_FIXED64: + dec = "b.DecodeFixed64()" + case descriptor.FieldDescriptorProto_TYPE_FIXED32: + dec, cast = "b.DecodeFixed32()", "uint32" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + dec = "b.DecodeVarint()" + // handled specially below + case descriptor.FieldDescriptorProto_TYPE_STRING: + dec = "b.DecodeStringBytes()" + case descriptor.FieldDescriptorProto_TYPE_GROUP: + g.P("msg := new(", fieldTypes[field][1:], ")") // drop star + lhs = "err" + dec = "b.DecodeGroup(msg)" + // handled specially below + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + g.P("msg := new(", fieldTypes[field][1:], ")") // drop star + lhs = "err" + dec = "b.DecodeMessage(msg)" + // handled specially below + case descriptor.FieldDescriptorProto_TYPE_BYTES: + dec = "b.DecodeRawBytes(true)" + case descriptor.FieldDescriptorProto_TYPE_UINT32: + dec, cast = "b.DecodeVarint()", "uint32" + case 
descriptor.FieldDescriptorProto_TYPE_ENUM: + dec, cast = "b.DecodeVarint()", fieldTypes[field] + case descriptor.FieldDescriptorProto_TYPE_SFIXED32: + dec, cast = "b.DecodeFixed32()", "int32" + case descriptor.FieldDescriptorProto_TYPE_SFIXED64: + dec, cast = "b.DecodeFixed64()", "int64" + case descriptor.FieldDescriptorProto_TYPE_SINT32: + dec, cast = "b.DecodeZigzag32()", "int32" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + dec, cast = "b.DecodeZigzag64()", "int64" + default: + g.Fail("unhandled oneof field type ", field.Type.String()) + } + g.P(lhs, " := ", dec) + val := "x" + if cast != "" { + val = cast + "(" + val + ")" + } + if cast2 != "" { + val = cast2 + "(" + val + ")" + } + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + val += " != 0" + case descriptor.FieldDescriptorProto_TYPE_GROUP, + descriptor.FieldDescriptorProto_TYPE_MESSAGE: + val = "msg" + } + g.P("m.", oneofFieldName[*field.OneofIndex], " = &", oneofTypeName[field], "{", val, "}") + g.P("return true, err") + } + g.P("default: return false, nil") + g.P("}") + g.P("}") + g.P() + + // sizer + g.P("func ", size, sizeSig, " {") + g.P("m := msg.(*", ccTypeName, ")") + for oi, odp := range message.OneofDecl { + g.P("// ", odp.GetName()) + fname := oneofFieldName[int32(oi)] + g.P("switch x := m.", fname, ".(type) {") + for _, field := range message.Field { + if field.OneofIndex == nil || int(*field.OneofIndex) != oi { + continue + } + g.P("case *", oneofTypeName[field], ":") + val := "x." + fieldNames[field] + var wire, varint, fixed string + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + wire = "WireFixed64" + fixed = "8" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + wire = "WireFixed32" + fixed = "4" + case descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64, + descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_ENUM: + wire = "WireVarint" + varint = val + case descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_SFIXED64: + wire = "WireFixed64" + fixed = "8" + case descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED32: + wire = "WireFixed32" + fixed = "4" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + wire = "WireVarint" + fixed = "1" + case descriptor.FieldDescriptorProto_TYPE_STRING: + wire = "WireBytes" + fixed = "len(" + val + ")" + varint = fixed + case descriptor.FieldDescriptorProto_TYPE_GROUP: + wire = "WireStartGroup" + fixed = g.Pkg["proto"] + ".Size(" + val + ")" + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + wire = "WireBytes" + g.P("s := ", g.Pkg["proto"], ".Size(", val, ")") + fixed = "s" + varint = fixed + case descriptor.FieldDescriptorProto_TYPE_BYTES: + wire = "WireBytes" + fixed = "len(" + val + ")" + varint = fixed + case descriptor.FieldDescriptorProto_TYPE_SINT32: + wire = "WireVarint" + varint = "(uint32(" + val + ") << 1) ^ uint32((int32(" + val + ") >> 31))" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + wire = "WireVarint" + varint = "uint64(" + val + " << 1) ^ uint64((int64(" + val + ") >> 63))" + default: + g.Fail("unhandled oneof field type ", field.Type.String()) + } + g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") + if varint != "" { + g.P("n += ", g.Pkg["proto"], ".SizeVarint(uint64(", varint, "))") + } + if fixed != "" { + g.P("n += ", fixed) + } + if *field.Type == 
descriptor.FieldDescriptorProto_TYPE_GROUP { + g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + } + } + g.P("case nil:") + g.P("default:") + g.P("panic(", g.Pkg["fmt"], ".Sprintf(\"proto: unexpected type %T in oneof\", x))") + g.P("}") + } + g.P("return n") + g.P("}") + g.P() + } + + for _, ext := range message.ext { + g.generateExtension(ext) + } + + fullName := strings.Join(message.TypeName(), ".") + if g.file.Package != nil { + fullName = *g.file.Package + "." + fullName + } + + g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], ccTypeName, fullName) +} + +var escapeChars = [256]byte{ + 'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', 'v': '\v', '\\': '\\', '"': '"', '\'': '\'', '?': '?', +} + +// unescape reverses the "C" escaping that protoc does for default values of bytes fields. +// It is best effort in that it effectively ignores malformed input. Seemingly invalid escape +// sequences are conveyed, unmodified, into the decoded result. +func unescape(s string) string { + // NB: Sadly, we can't use strconv.Unquote because protoc will escape both + // single and double quotes, but strconv.Unquote only allows one or the + // other (based on actual surrounding quotes of its input argument). + + var out []byte + for len(s) > 0 { + // regular character, or too short to be valid escape + if s[0] != '\\' || len(s) < 2 { + out = append(out, s[0]) + s = s[1:] + } else if c := escapeChars[s[1]]; c != 0 { + // escape sequence + out = append(out, c) + s = s[2:] + } else if s[1] == 'x' || s[1] == 'X' { + // hex escape, e.g. "\x80 + if len(s) < 4 { + // too short to be valid + out = append(out, s[:2]...) + s = s[2:] + continue + } + v, err := strconv.ParseUint(s[2:4], 16, 8) + if err != nil { + out = append(out, s[:4]...) + } else { + out = append(out, byte(v)) + } + s = s[4:] + } else if '0' <= s[1] && s[1] <= '7' { + // octal escape, can vary from 1 to 3 octal digits; e.g., "\0" "\40" or "\164" + // so consume up to 2 more bytes or up to end-of-string + n := len(s[1:]) - len(strings.TrimLeft(s[1:], "01234567")) + if n > 3 { + n = 3 + } + v, err := strconv.ParseUint(s[1:1+n], 8, 8) + if err != nil { + out = append(out, s[:1+n]...) + } else { + out = append(out, byte(v)) + } + s = s[1+n:] + } else { + // bad escape, just propagate the slash as-is + out = append(out, s[0]) + s = s[1:] + } + } + + return string(out) +} + +func (g *Generator) generateExtension(ext *ExtensionDescriptor) { + ccTypeName := ext.DescName() + + extObj := g.ObjectNamed(*ext.Extendee) + var extDesc *Descriptor + if id, ok := extObj.(*ImportedDescriptor); ok { + // This is extending a publicly imported message. + // We need the underlying type for goTag. + extDesc = id.o.(*Descriptor) + } else { + extDesc = extObj.(*Descriptor) + } + extendedType := "*" + g.TypeName(extObj) // always use the original + field := ext.FieldDescriptorProto + fieldType, wireType := g.GoType(ext.parent, field) + tag := g.goTag(extDesc, field, wireType) + g.RecordTypeUse(*ext.Extendee) + if n := ext.FieldDescriptorProto.TypeName; n != nil { + // foreign extension type + g.RecordTypeUse(*n) + } + + typeName := ext.TypeName() + + // Special case for proto2 message sets: If this extension is extending + // proto2_bridge.MessageSet, and its final name component is "message_set_extension", + // then drop that last component. 
+ mset := false + if extendedType == "*proto2_bridge.MessageSet" && typeName[len(typeName)-1] == "message_set_extension" { + typeName = typeName[:len(typeName)-1] + mset = true + } + + // For text formatting, the package must be exactly what the .proto file declares, + // ignoring overrides such as the go_package option, and with no dot/underscore mapping. + extName := strings.Join(typeName, ".") + if g.file.Package != nil { + extName = *g.file.Package + "." + extName + } + + g.P("var ", ccTypeName, " = &", g.Pkg["proto"], ".ExtensionDesc{") + g.In() + g.P("ExtendedType: (", extendedType, ")(nil),") + g.P("ExtensionType: (", fieldType, ")(nil),") + g.P("Field: ", field.Number, ",") + g.P(`Name: "`, extName, `",`) + g.P("Tag: ", tag, ",") + g.P(`Filename: "`, g.file.GetName(), `",`) + + g.Out() + g.P("}") + g.P() + + if mset { + // Generate a bit more code to register with message_set.go. + g.addInitf("%s.RegisterMessageSetType((%s)(nil), %d, %q)", g.Pkg["proto"], fieldType, *field.Number, extName) + } + + g.file.addExport(ext, constOrVarSymbol{ccTypeName, "var", ""}) +} + +func (g *Generator) generateInitFunction() { + for _, enum := range g.file.enum { + g.generateEnumRegistration(enum) + } + for _, d := range g.file.desc { + for _, ext := range d.ext { + g.generateExtensionRegistration(ext) + } + } + for _, ext := range g.file.ext { + g.generateExtensionRegistration(ext) + } + if len(g.init) == 0 { + return + } + g.P("func init() {") + g.In() + for _, l := range g.init { + g.P(l) + } + g.Out() + g.P("}") + g.init = nil +} + +func (g *Generator) generateFileDescriptor(file *FileDescriptor) { + // Make a copy and trim source_code_info data. + // TODO: Trim this more when we know exactly what we need. + pb := proto.Clone(file.FileDescriptorProto).(*descriptor.FileDescriptorProto) + pb.SourceCodeInfo = nil + + b, err := proto.Marshal(pb) + if err != nil { + g.Fail(err.Error()) + } + + var buf bytes.Buffer + w, _ := gzip.NewWriterLevel(&buf, gzip.BestCompression) + w.Write(b) + w.Close() + b = buf.Bytes() + + v := file.VarName() + g.P() + g.P("func init() { ", g.Pkg["proto"], ".RegisterFile(", strconv.Quote(*file.Name), ", ", v, ") }") + g.P("var ", v, " = []byte{") + g.In() + g.P("// ", len(b), " bytes of a gzipped FileDescriptorProto") + for len(b) > 0 { + n := 16 + if n > len(b) { + n = len(b) + } + + s := "" + for _, c := range b[:n] { + s += fmt.Sprintf("0x%02x,", c) + } + g.P(s) + + b = b[n:] + } + g.Out() + g.P("}") +} + +func (g *Generator) generateEnumRegistration(enum *EnumDescriptor) { + // // We always print the full (proto-world) package name here. + pkg := enum.File().GetPackage() + if pkg != "" { + pkg += "." + } + // The full type name + typeName := enum.TypeName() + // The full type name, CamelCased. + ccTypeName := CamelCaseSlice(typeName) + g.addInitf("%s.RegisterEnum(%q, %[3]s_name, %[3]s_value)", g.Pkg["proto"], pkg+ccTypeName, ccTypeName) +} + +func (g *Generator) generateExtensionRegistration(ext *ExtensionDescriptor) { + g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName()) +} + +// And now lots of helper functions. + +// Is c an ASCII lower-case letter? +func isASCIILower(c byte) bool { + return 'a' <= c && c <= 'z' +} + +// Is c an ASCII digit? +func isASCIIDigit(c byte) bool { + return '0' <= c && c <= '9' +} + +// CamelCase returns the CamelCased name. +// If there is an interior underscore followed by a lower case letter, +// drop the underscore and convert the letter to upper case. 
+// There is a remote possibility of this rewrite causing a name collision, +// but it's so remote we're prepared to pretend it's nonexistent - since the +// C++ generator lowercases names, it's extremely unlikely to have two fields +// with different capitalizations. +// In short, _my_field_name_2 becomes XMyFieldName_2. +func CamelCase(s string) string { + if s == "" { + return "" + } + t := make([]byte, 0, 32) + i := 0 + if s[0] == '_' { + // Need a capital letter; drop the '_'. + t = append(t, 'X') + i++ + } + // Invariant: if the next letter is lower case, it must be converted + // to upper case. + // That is, we process a word at a time, where words are marked by _ or + // upper case letter. Digits are treated as words. + for ; i < len(s); i++ { + c := s[i] + if c == '_' && i+1 < len(s) && isASCIILower(s[i+1]) { + continue // Skip the underscore in s. + } + if isASCIIDigit(c) { + t = append(t, c) + continue + } + // Assume we have a letter now - if not, it's a bogus identifier. + // The next word is a sequence of characters that must start upper case. + if isASCIILower(c) { + c ^= ' ' // Make it a capital letter. + } + t = append(t, c) // Guaranteed not lower case. + // Accept lower case sequence that follows. + for i+1 < len(s) && isASCIILower(s[i+1]) { + i++ + t = append(t, s[i]) + } + } + return string(t) +} + +// CamelCaseSlice is like CamelCase, but the argument is a slice of strings to +// be joined with "_". +func CamelCaseSlice(elem []string) string { return CamelCase(strings.Join(elem, "_")) } + +// dottedSlice turns a sliced name into a dotted name. +func dottedSlice(elem []string) string { return strings.Join(elem, ".") } + +// Is this field optional? +func isOptional(field *descriptor.FieldDescriptorProto) bool { + return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_OPTIONAL +} + +// Is this field required? +func isRequired(field *descriptor.FieldDescriptorProto) bool { + return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REQUIRED +} + +// Is this field repeated? +func isRepeated(field *descriptor.FieldDescriptorProto) bool { + return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED +} + +// Is this field a scalar numeric type? +func isScalar(field *descriptor.FieldDescriptorProto) bool { + if field.Type == nil { + return false + } + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE, + descriptor.FieldDescriptorProto_TYPE_FLOAT, + descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64, + descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_BOOL, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_ENUM, + descriptor.FieldDescriptorProto_TYPE_SFIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED64, + descriptor.FieldDescriptorProto_TYPE_SINT32, + descriptor.FieldDescriptorProto_TYPE_SINT64: + return true + default: + return false + } +} + +// badToUnderscore is the mapping function used to generate Go names from package names, +// which can be dotted in the input .proto file. It replaces non-identifier characters such as +// dot or dash with underscore. 
+func badToUnderscore(r rune) rune { + if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' { + return r + } + return '_' +} + +// baseName returns the last path element of the name, with the last dotted suffix removed. +func baseName(name string) string { + // First, find the last element + if i := strings.LastIndex(name, "/"); i >= 0 { + name = name[i+1:] + } + // Now drop the suffix + if i := strings.LastIndex(name, "."); i >= 0 { + name = name[0:i] + } + return name +} + +// The SourceCodeInfo message describes the location of elements of a parsed +// .proto file by way of a "path", which is a sequence of integers that +// describe the route from a FileDescriptorProto to the relevant submessage. +// The path alternates between a field number of a repeated field, and an index +// into that repeated field. The constants below define the field numbers that +// are used. +// +// See descriptor.proto for more information about this. +const ( + // tag numbers in FileDescriptorProto + packagePath = 2 // package + messagePath = 4 // message_type + enumPath = 5 // enum_type + // tag numbers in DescriptorProto + messageFieldPath = 2 // field + messageMessagePath = 3 // nested_type + messageEnumPath = 4 // enum_type + messageOneofPath = 8 // oneof_decl + // tag numbers in EnumDescriptorProto + enumValuePath = 2 // value +) diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go new file mode 100644 index 000000000..76808f3b7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/name_test.go @@ -0,0 +1,114 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2013 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package generator + +import ( + "testing" + + "github.com/golang/protobuf/protoc-gen-go/descriptor" +) + +func TestCamelCase(t *testing.T) { + tests := []struct { + in, want string + }{ + {"one", "One"}, + {"one_two", "OneTwo"}, + {"_my_field_name_2", "XMyFieldName_2"}, + {"Something_Capped", "Something_Capped"}, + {"my_Name", "My_Name"}, + {"OneTwo", "OneTwo"}, + {"_", "X"}, + {"_a_", "XA_"}, + } + for _, tc := range tests { + if got := CamelCase(tc.in); got != tc.want { + t.Errorf("CamelCase(%q) = %q, want %q", tc.in, got, tc.want) + } + } +} + +func TestGoPackageOption(t *testing.T) { + tests := []struct { + in string + impPath, pkg string + ok bool + }{ + {"", "", "", false}, + {"foo", "", "foo", true}, + {"github.com/golang/bar", "github.com/golang/bar", "bar", true}, + {"github.com/golang/bar;baz", "github.com/golang/bar", "baz", true}, + } + for _, tc := range tests { + d := &FileDescriptor{ + FileDescriptorProto: &descriptor.FileDescriptorProto{ + Options: &descriptor.FileOptions{ + GoPackage: &tc.in, + }, + }, + } + impPath, pkg, ok := d.goPackageOption() + if impPath != tc.impPath || pkg != tc.pkg || ok != tc.ok { + t.Errorf("go_package = %q => (%q, %q, %t), want (%q, %q, %t)", tc.in, + impPath, pkg, ok, tc.impPath, tc.pkg, tc.ok) + } + } +} + +func TestUnescape(t *testing.T) { + tests := []struct { + in string + out string + }{ + // successful cases, including all kinds of escapes + {"", ""}, + {"foo bar baz frob nitz", "foo bar baz frob nitz"}, + {`\000\001\002\003\004\005\006\007`, string([]byte{0, 1, 2, 3, 4, 5, 6, 7})}, + {`\a\b\f\n\r\t\v\\\?\'\"`, string([]byte{'\a', '\b', '\f', '\n', '\r', '\t', '\v', '\\', '?', '\'', '"'})}, + {`\x10\x20\x30\x40\x50\x60\x70\x80`, string([]byte{16, 32, 48, 64, 80, 96, 112, 128})}, + // variable length octal escapes + {`\0\018\222\377\3\04\005\6\07`, string([]byte{0, 1, '8', 0222, 255, 3, 4, 5, 6, 7})}, + // malformed escape sequences left as is + {"foo \\g bar", "foo \\g bar"}, + {"foo \\xg0 bar", "foo \\xg0 bar"}, + {"\\", "\\"}, + {"\\x", "\\x"}, + {"\\xf", "\\xf"}, + {"\\777", "\\777"}, // overflows byte + } + for _, tc := range tests { + s := unescape(tc.in) + if s != tc.out { + t.Errorf("doUnescape(%q) = %q; should have been %q", tc.in, s, tc.out) + } + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go new file mode 100644 index 000000000..2660e47a2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go @@ -0,0 +1,463 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Package grpc outputs gRPC service descriptions in Go code. +// It runs as a plugin for the Go protocol buffer compiler plugin. +// It is linked in to protoc-gen-go. +package grpc + +import ( + "fmt" + "path" + "strconv" + "strings" + + pb "github.com/golang/protobuf/protoc-gen-go/descriptor" + "github.com/golang/protobuf/protoc-gen-go/generator" +) + +// generatedCodeVersion indicates a version of the generated code. +// It is incremented whenever an incompatibility between the generated code and +// the grpc package is introduced; the generated code references +// a constant, grpc.SupportPackageIsVersionN (where N is generatedCodeVersion). +const generatedCodeVersion = 4 + +// Paths for packages used by code generated in this file, +// relative to the import_prefix of the generator.Generator. +const ( + contextPkgPath = "golang.org/x/net/context" + grpcPkgPath = "google.golang.org/grpc" +) + +func init() { + generator.RegisterPlugin(new(grpc)) +} + +// grpc is an implementation of the Go protocol buffer compiler's +// plugin architecture. It generates bindings for gRPC support. +type grpc struct { + gen *generator.Generator +} + +// Name returns the name of this plugin, "grpc". +func (g *grpc) Name() string { + return "grpc" +} + +// The names for packages imported in the generated code. +// They may vary from the final path component of the import path +// if the name is used by other packages. +var ( + contextPkg string + grpcPkg string +) + +// Init initializes the plugin. +func (g *grpc) Init(gen *generator.Generator) { + g.gen = gen + contextPkg = generator.RegisterUniquePackageName("context", nil) + grpcPkg = generator.RegisterUniquePackageName("grpc", nil) +} + +// Given a type name defined in a .proto, return its object. +// Also record that we're using it, to guarantee the associated import. +func (g *grpc) objectNamed(name string) generator.Object { + g.gen.RecordTypeUse(name) + return g.gen.ObjectNamed(name) +} + +// Given a type name defined in a .proto, return its name as we will print it. +func (g *grpc) typeName(str string) string { + return g.gen.TypeName(g.objectNamed(str)) +} + +// P forwards to g.gen.P. +func (g *grpc) P(args ...interface{}) { g.gen.P(args...) } + +// Generate generates code for the services in the given file. +func (g *grpc) Generate(file *generator.FileDescriptor) { + if len(file.FileDescriptorProto.Service) == 0 { + return + } + + g.P("// Reference imports to suppress errors if they are not otherwise used.") + g.P("var _ ", contextPkg, ".Context") + g.P("var _ ", grpcPkg, ".ClientConn") + g.P() + + // Assert version compatibility. 
+ g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the grpc package it is being compiled against.") + g.P("const _ = ", grpcPkg, ".SupportPackageIsVersion", generatedCodeVersion) + g.P() + + for i, service := range file.FileDescriptorProto.Service { + g.generateService(file, service, i) + } +} + +// GenerateImports generates the import declaration for this file. +func (g *grpc) GenerateImports(file *generator.FileDescriptor) { + if len(file.FileDescriptorProto.Service) == 0 { + return + } + g.P("import (") + g.P(contextPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, contextPkgPath))) + g.P(grpcPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, grpcPkgPath))) + g.P(")") + g.P() +} + +// reservedClientName records whether a client name is reserved on the client side. +var reservedClientName = map[string]bool{ +// TODO: do we need any in gRPC? +} + +func unexport(s string) string { return strings.ToLower(s[:1]) + s[1:] } + +// generateService generates all the code for the named service. +func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.ServiceDescriptorProto, index int) { + path := fmt.Sprintf("6,%d", index) // 6 means service. + + origServName := service.GetName() + fullServName := origServName + if pkg := file.GetPackage(); pkg != "" { + fullServName = pkg + "." + fullServName + } + servName := generator.CamelCase(origServName) + + g.P() + g.P("// Client API for ", servName, " service") + g.P() + + // Client interface. + g.P("type ", servName, "Client interface {") + for i, method := range service.Method { + g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. + g.P(g.generateClientSignature(servName, method)) + } + g.P("}") + g.P() + + // Client structure. + g.P("type ", unexport(servName), "Client struct {") + g.P("cc *", grpcPkg, ".ClientConn") + g.P("}") + g.P() + + // NewClient factory. + g.P("func New", servName, "Client (cc *", grpcPkg, ".ClientConn) ", servName, "Client {") + g.P("return &", unexport(servName), "Client{cc}") + g.P("}") + g.P() + + var methodIndex, streamIndex int + serviceDescVar := "_" + servName + "_serviceDesc" + // Client method implementations. + for _, method := range service.Method { + var descExpr string + if !method.GetServerStreaming() && !method.GetClientStreaming() { + // Unary RPC method + descExpr = fmt.Sprintf("&%s.Methods[%d]", serviceDescVar, methodIndex) + methodIndex++ + } else { + // Streaming RPC method + descExpr = fmt.Sprintf("&%s.Streams[%d]", serviceDescVar, streamIndex) + streamIndex++ + } + g.generateClientMethod(servName, fullServName, serviceDescVar, method, descExpr) + } + + g.P("// Server API for ", servName, " service") + g.P() + + // Server interface. + serverType := servName + "Server" + g.P("type ", serverType, " interface {") + for i, method := range service.Method { + g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. + g.P(g.generateServerSignature(servName, method)) + } + g.P("}") + g.P() + + // Server registration. + g.P("func Register", servName, "Server(s *", grpcPkg, ".Server, srv ", serverType, ") {") + g.P("s.RegisterService(&", serviceDescVar, `, srv)`) + g.P("}") + g.P() + + // Server handler implementations. + var handlerNames []string + for _, method := range service.Method { + hname := g.generateServerMethod(servName, fullServName, method) + handlerNames = append(handlerNames, hname) + } + + // Service descriptor. 
+ g.P("var ", serviceDescVar, " = ", grpcPkg, ".ServiceDesc {") + g.P("ServiceName: ", strconv.Quote(fullServName), ",") + g.P("HandlerType: (*", serverType, ")(nil),") + g.P("Methods: []", grpcPkg, ".MethodDesc{") + for i, method := range service.Method { + if method.GetServerStreaming() || method.GetClientStreaming() { + continue + } + g.P("{") + g.P("MethodName: ", strconv.Quote(method.GetName()), ",") + g.P("Handler: ", handlerNames[i], ",") + g.P("},") + } + g.P("},") + g.P("Streams: []", grpcPkg, ".StreamDesc{") + for i, method := range service.Method { + if !method.GetServerStreaming() && !method.GetClientStreaming() { + continue + } + g.P("{") + g.P("StreamName: ", strconv.Quote(method.GetName()), ",") + g.P("Handler: ", handlerNames[i], ",") + if method.GetServerStreaming() { + g.P("ServerStreams: true,") + } + if method.GetClientStreaming() { + g.P("ClientStreams: true,") + } + g.P("},") + } + g.P("},") + g.P("Metadata: \"", file.GetName(), "\",") + g.P("}") + g.P() +} + +// generateClientSignature returns the client-side signature for a method. +func (g *grpc) generateClientSignature(servName string, method *pb.MethodDescriptorProto) string { + origMethName := method.GetName() + methName := generator.CamelCase(origMethName) + if reservedClientName[methName] { + methName += "_" + } + reqArg := ", in *" + g.typeName(method.GetInputType()) + if method.GetClientStreaming() { + reqArg = "" + } + respName := "*" + g.typeName(method.GetOutputType()) + if method.GetServerStreaming() || method.GetClientStreaming() { + respName = servName + "_" + generator.CamelCase(origMethName) + "Client" + } + return fmt.Sprintf("%s(ctx %s.Context%s, opts ...%s.CallOption) (%s, error)", methName, contextPkg, reqArg, grpcPkg, respName) +} + +func (g *grpc) generateClientMethod(servName, fullServName, serviceDescVar string, method *pb.MethodDescriptorProto, descExpr string) { + sname := fmt.Sprintf("/%s/%s", fullServName, method.GetName()) + methName := generator.CamelCase(method.GetName()) + inType := g.typeName(method.GetInputType()) + outType := g.typeName(method.GetOutputType()) + + g.P("func (c *", unexport(servName), "Client) ", g.generateClientSignature(servName, method), "{") + if !method.GetServerStreaming() && !method.GetClientStreaming() { + g.P("out := new(", outType, ")") + // TODO: Pass descExpr to Invoke. + g.P("err := ", grpcPkg, `.Invoke(ctx, "`, sname, `", in, out, c.cc, opts...)`) + g.P("if err != nil { return nil, err }") + g.P("return out, nil") + g.P("}") + g.P() + return + } + streamType := unexport(servName) + methName + "Client" + g.P("stream, err := ", grpcPkg, ".NewClientStream(ctx, ", descExpr, `, c.cc, "`, sname, `", opts...)`) + g.P("if err != nil { return nil, err }") + g.P("x := &", streamType, "{stream}") + if !method.GetClientStreaming() { + g.P("if err := x.ClientStream.SendMsg(in); err != nil { return nil, err }") + g.P("if err := x.ClientStream.CloseSend(); err != nil { return nil, err }") + } + g.P("return x, nil") + g.P("}") + g.P() + + genSend := method.GetClientStreaming() + genRecv := method.GetServerStreaming() + genCloseAndRecv := !method.GetServerStreaming() + + // Stream auxiliary types and methods. 
+ g.P("type ", servName, "_", methName, "Client interface {") + if genSend { + g.P("Send(*", inType, ") error") + } + if genRecv { + g.P("Recv() (*", outType, ", error)") + } + if genCloseAndRecv { + g.P("CloseAndRecv() (*", outType, ", error)") + } + g.P(grpcPkg, ".ClientStream") + g.P("}") + g.P() + + g.P("type ", streamType, " struct {") + g.P(grpcPkg, ".ClientStream") + g.P("}") + g.P() + + if genSend { + g.P("func (x *", streamType, ") Send(m *", inType, ") error {") + g.P("return x.ClientStream.SendMsg(m)") + g.P("}") + g.P() + } + if genRecv { + g.P("func (x *", streamType, ") Recv() (*", outType, ", error) {") + g.P("m := new(", outType, ")") + g.P("if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err }") + g.P("return m, nil") + g.P("}") + g.P() + } + if genCloseAndRecv { + g.P("func (x *", streamType, ") CloseAndRecv() (*", outType, ", error) {") + g.P("if err := x.ClientStream.CloseSend(); err != nil { return nil, err }") + g.P("m := new(", outType, ")") + g.P("if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err }") + g.P("return m, nil") + g.P("}") + g.P() + } +} + +// generateServerSignature returns the server-side signature for a method. +func (g *grpc) generateServerSignature(servName string, method *pb.MethodDescriptorProto) string { + origMethName := method.GetName() + methName := generator.CamelCase(origMethName) + if reservedClientName[methName] { + methName += "_" + } + + var reqArgs []string + ret := "error" + if !method.GetServerStreaming() && !method.GetClientStreaming() { + reqArgs = append(reqArgs, contextPkg+".Context") + ret = "(*" + g.typeName(method.GetOutputType()) + ", error)" + } + if !method.GetClientStreaming() { + reqArgs = append(reqArgs, "*"+g.typeName(method.GetInputType())) + } + if method.GetServerStreaming() || method.GetClientStreaming() { + reqArgs = append(reqArgs, servName+"_"+generator.CamelCase(origMethName)+"Server") + } + + return methName + "(" + strings.Join(reqArgs, ", ") + ") " + ret +} + +func (g *grpc) generateServerMethod(servName, fullServName string, method *pb.MethodDescriptorProto) string { + methName := generator.CamelCase(method.GetName()) + hname := fmt.Sprintf("_%s_%s_Handler", servName, methName) + inType := g.typeName(method.GetInputType()) + outType := g.typeName(method.GetOutputType()) + + if !method.GetServerStreaming() && !method.GetClientStreaming() { + g.P("func ", hname, "(srv interface{}, ctx ", contextPkg, ".Context, dec func(interface{}) error, interceptor ", grpcPkg, ".UnaryServerInterceptor) (interface{}, error) {") + g.P("in := new(", inType, ")") + g.P("if err := dec(in); err != nil { return nil, err }") + g.P("if interceptor == nil { return srv.(", servName, "Server).", methName, "(ctx, in) }") + g.P("info := &", grpcPkg, ".UnaryServerInfo{") + g.P("Server: srv,") + g.P("FullMethod: ", strconv.Quote(fmt.Sprintf("/%s/%s", fullServName, methName)), ",") + g.P("}") + g.P("handler := func(ctx ", contextPkg, ".Context, req interface{}) (interface{}, error) {") + g.P("return srv.(", servName, "Server).", methName, "(ctx, req.(*", inType, "))") + g.P("}") + g.P("return interceptor(ctx, in, info, handler)") + g.P("}") + g.P() + return hname + } + streamType := unexport(servName) + methName + "Server" + g.P("func ", hname, "(srv interface{}, stream ", grpcPkg, ".ServerStream) error {") + if !method.GetClientStreaming() { + g.P("m := new(", inType, ")") + g.P("if err := stream.RecvMsg(m); err != nil { return err }") + g.P("return srv.(", servName, "Server).", methName, "(m, &", streamType, 
"{stream})") + } else { + g.P("return srv.(", servName, "Server).", methName, "(&", streamType, "{stream})") + } + g.P("}") + g.P() + + genSend := method.GetServerStreaming() + genSendAndClose := !method.GetServerStreaming() + genRecv := method.GetClientStreaming() + + // Stream auxiliary types and methods. + g.P("type ", servName, "_", methName, "Server interface {") + if genSend { + g.P("Send(*", outType, ") error") + } + if genSendAndClose { + g.P("SendAndClose(*", outType, ") error") + } + if genRecv { + g.P("Recv() (*", inType, ", error)") + } + g.P(grpcPkg, ".ServerStream") + g.P("}") + g.P() + + g.P("type ", streamType, " struct {") + g.P(grpcPkg, ".ServerStream") + g.P("}") + g.P() + + if genSend { + g.P("func (x *", streamType, ") Send(m *", outType, ") error {") + g.P("return x.ServerStream.SendMsg(m)") + g.P("}") + g.P() + } + if genSendAndClose { + g.P("func (x *", streamType, ") SendAndClose(m *", outType, ") error {") + g.P("return x.ServerStream.SendMsg(m)") + g.P("}") + g.P() + } + if genRecv { + g.P("func (x *", streamType, ") Recv() (*", inType, ", error) {") + g.P("m := new(", inType, ")") + g.P("if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err }") + g.P("return m, nil") + g.P("}") + g.P() + } + + return hname +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go new file mode 100644 index 000000000..532a55005 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go @@ -0,0 +1,34 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package main + +import _ "github.com/golang/protobuf/protoc-gen-go/grpc" diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/main.go b/vendor/github.com/golang/protobuf/protoc-gen-go/main.go new file mode 100644 index 000000000..8e2486de0 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/main.go @@ -0,0 +1,98 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// protoc-gen-go is a plugin for the Google protocol buffer compiler to generate +// Go code. Run it by building this program and putting it in your path with +// the name +// protoc-gen-go +// That word 'go' at the end becomes part of the option string set for the +// protocol compiler, so once the protocol compiler (protoc) is installed +// you can run +// protoc --go_out=output_directory input_directory/file.proto +// to generate Go bindings for the protocol defined by file.proto. +// With that input, the output will be written to +// output_directory/file.pb.go +// +// The generated code is documented in the package comment for +// the library. +// +// See the README and documentation for protocol buffers to learn more: +// https://developers.google.com/protocol-buffers/ +package main + +import ( + "io/ioutil" + "os" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/generator" +) + +func main() { + // Begin by allocating a generator. The request and response structures are stored there + // so we can do error handling easily - the response structure contains the field to + // report failure. 
+ g := generator.New() + + data, err := ioutil.ReadAll(os.Stdin) + if err != nil { + g.Error(err, "reading input") + } + + if err := proto.Unmarshal(data, g.Request); err != nil { + g.Error(err, "parsing input proto") + } + + if len(g.Request.FileToGenerate) == 0 { + g.Fail("no files to generate") + } + + g.CommandLineParameters(g.Request.GetParameter()) + + // Create a wrapped version of the Descriptors and EnumDescriptors that + // point to the file that defines them. + g.WrapTypes() + + g.SetPackageNames() + g.BuildTypeNameMap() + + g.GenerateAllFiles() + + // Send back the results. + data, err = proto.Marshal(g.Response) + if err != nil { + g.Error(err, "failed to marshal output proto") + } + _, err = os.Stdout.Write(data) + if err != nil { + g.Error(err, "failed to write output proto") + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile new file mode 100644 index 000000000..bc0463d57 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/Makefile @@ -0,0 +1,45 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Not stored here, but plugin.proto is in https://github.com/google/protobuf/ +# at src/google/protobuf/compiler/plugin.proto +# Also we need to fix an import. +regenerate: + @echo WARNING! THIS RULE IS PROBABLY NOT RIGHT FOR YOUR INSTALLATION + cp $(HOME)/src/protobuf/include/google/protobuf/compiler/plugin.proto . + protoc --go_out=Mgoogle/protobuf/descriptor.proto=github.com/golang/protobuf/protoc-gen-go/descriptor:../../../../.. 
\ + -I$(HOME)/src/protobuf/include $(HOME)/src/protobuf/include/google/protobuf/compiler/plugin.proto + +restore: + cp plugin.pb.golden plugin.pb.go + +preserve: + cp plugin.pb.go plugin.pb.golden diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go new file mode 100644 index 000000000..c608a248b --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go @@ -0,0 +1,293 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/compiler/plugin.proto + +/* +Package plugin_go is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/compiler/plugin.proto + +It has these top-level messages: + Version + CodeGeneratorRequest + CodeGeneratorResponse +*/ +package plugin_go + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The version number of protocol compiler. +type Version struct { + Major *int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"` + Minor *int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"` + Patch *int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"` + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Version) GetMajor() int32 { + if m != nil && m.Major != nil { + return *m.Major + } + return 0 +} + +func (m *Version) GetMinor() int32 { + if m != nil && m.Minor != nil { + return *m.Minor + } + return 0 +} + +func (m *Version) GetPatch() int32 { + if m != nil && m.Patch != nil { + return *m.Patch + } + return 0 +} + +func (m *Version) GetSuffix() string { + if m != nil && m.Suffix != nil { + return *m.Suffix + } + return "" +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +type CodeGeneratorRequest struct { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate,json=fileToGenerate" json:"file_to_generate,omitempty"` + // The generator parameter passed on the command-line. + Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"` + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. 
+ // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"` + // The version number of protocol compiler. + CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} } +func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) } +func (*CodeGeneratorRequest) ProtoMessage() {} +func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *CodeGeneratorRequest) GetFileToGenerate() []string { + if m != nil { + return m.FileToGenerate + } + return nil +} + +func (m *CodeGeneratorRequest) GetParameter() string { + if m != nil && m.Parameter != nil { + return *m.Parameter + } + return "" +} + +func (m *CodeGeneratorRequest) GetProtoFile() []*google_protobuf.FileDescriptorProto { + if m != nil { + return m.ProtoFile + } + return nil +} + +func (m *CodeGeneratorRequest) GetCompilerVersion() *Version { + if m != nil { + return m.CompilerVersion + } + return nil +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +type CodeGeneratorResponse struct { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. + // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} } +func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) } +func (*CodeGeneratorResponse) ProtoMessage() {} +func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *CodeGeneratorResponse) GetError() string { + if m != nil && m.Error != nil { + return *m.Error + } + return "" +} + +func (m *CodeGeneratorResponse) GetFile() []*CodeGeneratorResponse_File { + if m != nil { + return m.File + } + return nil +} + +// Represents a single generated file. +type CodeGeneratorResponse_File struct { + // The file name, relative to the output directory. The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". 
+ // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"` + // The file contents. 
+ Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} } +func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) } +func (*CodeGeneratorResponse_File) ProtoMessage() {} +func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } + +func (m *CodeGeneratorResponse_File) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *CodeGeneratorResponse_File) GetInsertionPoint() string { + if m != nil && m.InsertionPoint != nil { + return *m.InsertionPoint + } + return "" +} + +func (m *CodeGeneratorResponse_File) GetContent() string { + if m != nil && m.Content != nil { + return *m.Content + } + return "" +} + +func init() { + proto.RegisterType((*Version)(nil), "google.protobuf.compiler.Version") + proto.RegisterType((*CodeGeneratorRequest)(nil), "google.protobuf.compiler.CodeGeneratorRequest") + proto.RegisterType((*CodeGeneratorResponse)(nil), "google.protobuf.compiler.CodeGeneratorResponse") + proto.RegisterType((*CodeGeneratorResponse_File)(nil), "google.protobuf.compiler.CodeGeneratorResponse.File") +} + +func init() { proto.RegisterFile("google/protobuf/compiler/plugin.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 417 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xcf, 0x6a, 0x14, 0x41, + 0x10, 0xc6, 0x19, 0x77, 0x63, 0x98, 0x8a, 0x64, 0x43, 0x13, 0xa5, 0x09, 0x39, 0x8c, 0x8b, 0xe2, + 0x5c, 0x32, 0x0b, 0xc1, 0x8b, 0x78, 0x4b, 0x44, 0x3d, 0x78, 0x58, 0x1a, 0xf1, 0x20, 0xc8, 0x30, + 0x99, 0xd4, 0x74, 0x5a, 0x66, 0xba, 0xc6, 0xee, 0x1e, 0xf1, 0x49, 0x7d, 0x0f, 0xdf, 0x40, 0xfa, + 0xcf, 0x24, 0xb2, 0xb8, 0xa7, 0xee, 0xef, 0x57, 0xd5, 0xd5, 0x55, 0x1f, 0x05, 0x2f, 0x25, 0x91, + 0xec, 0x71, 0x33, 0x1a, 0x72, 0x74, 0x33, 0x75, 0x9b, 0x96, 0x86, 0x51, 0xf5, 0x68, 0x36, 0x63, + 0x3f, 0x49, 0xa5, 0xab, 0x10, 0x60, 0x3c, 0xa6, 0x55, 0x73, 0x5a, 0x35, 0xa7, 0x9d, 0x15, 0xbb, + 0x05, 0x6e, 0xd1, 0xb6, 0x46, 0x8d, 0x8e, 0x4c, 0xcc, 0x5e, 0xb7, 0x70, 0xf8, 0x05, 0x8d, 0x55, + 0xa4, 0xd9, 0x29, 0x1c, 0x0c, 0xcd, 0x77, 0x32, 0x3c, 0x2b, 0xb2, 0xf2, 0x40, 0x44, 0x11, 0xa8, + 0xd2, 0x64, 0xf8, 0xa3, 0x44, 0xbd, 0xf0, 0x74, 0x6c, 0x5c, 0x7b, 0xc7, 0x17, 0x91, 0x06, 0xc1, + 0x9e, 0xc1, 0x63, 0x3b, 0x75, 0x9d, 0xfa, 0xc5, 0x97, 0x45, 0x56, 0xe6, 0x22, 0xa9, 0xf5, 0x9f, + 0x0c, 0x4e, 0xaf, 0xe9, 0x16, 0x3f, 0xa0, 0x46, 0xd3, 0x38, 0x32, 0x02, 0x7f, 0x4c, 0x68, 0x1d, + 0x2b, 0xe1, 0xa4, 0x53, 0x3d, 0xd6, 0x8e, 0x6a, 0x19, 0x63, 0xc8, 0xb3, 0x62, 0x51, 0xe6, 0xe2, + 0xd8, 0xf3, 0xcf, 0x94, 0x5e, 0x20, 0x3b, 0x87, 0x7c, 0x6c, 0x4c, 0x33, 0xa0, 0xc3, 0xd8, 0x4a, + 0x2e, 0x1e, 0x00, 0xbb, 0x06, 0x08, 0xe3, 0xd4, 0xfe, 0x15, 0x5f, 0x15, 0x8b, 0xf2, 0xe8, 0xf2, + 0x45, 0xb5, 0x6b, 0xcb, 0x7b, 0xd5, 0xe3, 0xbb, 0x7b, 0x03, 0xb6, 0x1e, 0x8b, 0x3c, 0x44, 0x7d, + 0x84, 0x7d, 0x82, 0x93, 0xd9, 0xb8, 0xfa, 0x67, 0xf4, 0x24, 0x8c, 0x77, 0x74, 0xf9, 0xbc, 0xda, + 0xe7, 0x70, 0x95, 0xcc, 0x13, 0xab, 0x99, 0x24, 0xb0, 0xfe, 0x9d, 0xc1, 0xd3, 0x9d, 0x99, 0xed, + 0x48, 0xda, 0xa2, 0xf7, 0x0e, 0x8d, 0x49, 0x3e, 0xe7, 0x22, 0x0a, 0xf6, 0x11, 0x96, 0xff, 0x34, + 0xff, 0x7a, 0xff, 0x8f, 0xff, 0x2d, 0x1a, 0x66, 0x13, 0xa1, 0xc2, 0xd9, 0x37, 0x58, 0x86, 0x79, + 0x18, 0x2c, 0x75, 0x33, 0x60, 0xfa, 0x26, 0xdc, 0xd9, 0x2b, 0x58, 0x29, 0x6d, 0xd1, 0x38, 0x45, + 0xba, 0x1e, 0x49, 0x69, 
0x97, 0xcc, 0x3c, 0xbe, 0xc7, 0x5b, 0x4f, 0x19, 0x87, 0xc3, 0x96, 0xb4, + 0x43, 0xed, 0xf8, 0x2a, 0x24, 0xcc, 0xf2, 0x4a, 0xc2, 0x79, 0x4b, 0xc3, 0xde, 0xfe, 0xae, 0x9e, + 0x6c, 0xc3, 0x6e, 0x06, 0x7b, 0xed, 0xd7, 0x37, 0x52, 0xb9, 0xbb, 0xe9, 0xc6, 0x87, 0x37, 0x92, + 0xfa, 0x46, 0xcb, 0x87, 0x65, 0x0c, 0x97, 0xf6, 0x42, 0xa2, 0xbe, 0x90, 0x94, 0x56, 0xfa, 0x6d, + 0x3c, 0x6a, 0x49, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf7, 0x15, 0x40, 0xc5, 0xfe, 0x02, 0x00, + 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden new file mode 100644 index 000000000..8953d0ff8 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.golden @@ -0,0 +1,83 @@ +// Code generated by protoc-gen-go. +// source: google/protobuf/compiler/plugin.proto +// DO NOT EDIT! + +package google_protobuf_compiler + +import proto "github.com/golang/protobuf/proto" +import "math" +import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference proto and math imports to suppress error if they are not otherwise used. +var _ = proto.GetString +var _ = math.Inf + +type CodeGeneratorRequest struct { + FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate" json:"file_to_generate,omitempty"` + Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"` + ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file" json:"proto_file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (this *CodeGeneratorRequest) Reset() { *this = CodeGeneratorRequest{} } +func (this *CodeGeneratorRequest) String() string { return proto.CompactTextString(this) } +func (*CodeGeneratorRequest) ProtoMessage() {} + +func (this *CodeGeneratorRequest) GetParameter() string { + if this != nil && this.Parameter != nil { + return *this.Parameter + } + return "" +} + +type CodeGeneratorResponse struct { + Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (this *CodeGeneratorResponse) Reset() { *this = CodeGeneratorResponse{} } +func (this *CodeGeneratorResponse) String() string { return proto.CompactTextString(this) } +func (*CodeGeneratorResponse) ProtoMessage() {} + +func (this *CodeGeneratorResponse) GetError() string { + if this != nil && this.Error != nil { + return *this.Error + } + return "" +} + +type CodeGeneratorResponse_File struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point" json:"insertion_point,omitempty"` + Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (this *CodeGeneratorResponse_File) Reset() { *this = CodeGeneratorResponse_File{} } +func (this *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(this) } +func (*CodeGeneratorResponse_File) ProtoMessage() {} + +func (this *CodeGeneratorResponse_File) GetName() string { + if this != nil && this.Name != nil { + return *this.Name + } + return "" +} + +func (this *CodeGeneratorResponse_File) GetInsertionPoint() string { + if this != nil && this.InsertionPoint != nil { + return *this.InsertionPoint + } + return "" +} + +func (this *CodeGeneratorResponse_File) GetContent() string { + if this != 
nil && this.Content != nil { + return *this.Content + } + return "" +} + +func init() { +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto new file mode 100644 index 000000000..5b5574529 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.proto @@ -0,0 +1,167 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// +// WARNING: The plugin interface is currently EXPERIMENTAL and is subject to +// change. +// +// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +// just a program that reads a CodeGeneratorRequest from stdin and writes a +// CodeGeneratorResponse to stdout. +// +// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +// of dealing with the raw protocol defined here. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "protoc-gen-$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to protoc. + +syntax = "proto2"; +package google.protobuf.compiler; +option java_package = "com.google.protobuf.compiler"; +option java_outer_classname = "PluginProtos"; + +option go_package = "github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go"; + +import "google/protobuf/descriptor.proto"; + +// The version number of protocol compiler. +message Version { + optional int32 major = 1; + optional int32 minor = 2; + optional int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + optional string suffix = 4; +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +message CodeGeneratorRequest { + // The .proto files that were explicitly listed on the command-line. 
The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + repeated string file_to_generate = 1; + + // The generator parameter passed on the command-line. + optional string parameter = 2; + + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. + // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + repeated FileDescriptorProto proto_file = 15; + + // The version number of protocol compiler. + optional Version compiler_version = 3; + +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +message CodeGeneratorResponse { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. + // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + optional string error = 1; + + // Represents a single generated file. + message File { + // The file name, relative to the output directory. The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". + // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + optional string name = 1; + + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). 
+ // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + optional string insertion_point = 2; + + // The file contents. + optional string content = 15; + } + repeated File file = 15; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile new file mode 100644 index 000000000..a0bf9fefd --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/Makefile @@ -0,0 +1,73 @@ +# Go support for Protocol Buffers - Google's data interchange format +# +# Copyright 2010 The Go Authors. All rights reserved. +# https://github.com/golang/protobuf +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +all: + @echo run make test + +include ../../Make.protobuf + +test: golden testbuild + +#test: golden testbuild extension_test +# ./extension_test +# @echo PASS + +my_test/test.pb.go: my_test/test.proto + protoc --go_out=Mmulti/multi1.proto=github.com/golang/protobuf/protoc-gen-go/testdata/multi:. $< + +golden: + make -B my_test/test.pb.go + sed -i -e '/return.*fileDescriptor/d' my_test/test.pb.go + sed -i -e '/^var fileDescriptor/,/^}/d' my_test/test.pb.go + sed -i -e '/proto.RegisterFile.*fileDescriptor/d' my_test/test.pb.go + gofmt -w my_test/test.pb.go + diff -w my_test/test.pb.go my_test/test.pb.go.golden + +nuke: clean + +testbuild: regenerate + go test + +regenerate: + # Invoke protoc once to generate three independent .pb.go files in the same package. + protoc --go_out=. multi/multi1.proto multi/multi2.proto multi/multi3.proto + +#extension_test: extension_test.$O +# $(LD) -L. -o $@ $< + +#multi.a: multi3.pb.$O multi2.pb.$O multi1.pb.$O +# rm -f multi.a +# $(QUOTED_GOBIN)/gopack grc $@ $< + +#test.pb.go: imp.pb.go +#multi1.pb.go: multi2.pb.go multi3.pb.go +#main.$O: imp.pb.$O test.pb.$O multi.a +#extension_test.$O: extension_base.pb.$O extension_extra.pb.$O extension_user.pb.$O diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto new file mode 100644 index 000000000..94acfc1bc --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_base.proto @@ -0,0 +1,46 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package extension_base; + +message BaseMessage { + optional int32 height = 1; + extensions 4 to 9; + extensions 16 to max; +} + +// Another message that may be extended, using message_set_wire_format. 
+message OldStyleMessage { + option message_set_wire_format = true; + extensions 100 to max; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto new file mode 100644 index 000000000..fca7f600c --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_extra.proto @@ -0,0 +1,38 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2011 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package extension_extra; + +message ExtraMessage { + optional int32 width = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go new file mode 100644 index 000000000..86e9c118a --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_test.go @@ -0,0 +1,210 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Test that we can use protocol buffers that use extensions. + +package testdata + +/* + +import ( + "bytes" + "regexp" + "testing" + + "github.com/golang/protobuf/proto" + base "extension_base.pb" + user "extension_user.pb" +) + +func TestSingleFieldExtension(t *testing.T) { + bm := &base.BaseMessage{ + Height: proto.Int32(178), + } + + // Use extension within scope of another type. + vol := proto.Uint32(11) + err := proto.SetExtension(bm, user.E_LoudMessage_Volume, vol) + if err != nil { + t.Fatal("Failed setting extension:", err) + } + buf, err := proto.Marshal(bm) + if err != nil { + t.Fatal("Failed encoding message with extension:", err) + } + bm_new := new(base.BaseMessage) + if err := proto.Unmarshal(buf, bm_new); err != nil { + t.Fatal("Failed decoding message with extension:", err) + } + if !proto.HasExtension(bm_new, user.E_LoudMessage_Volume) { + t.Fatal("Decoded message didn't contain extension.") + } + vol_out, err := proto.GetExtension(bm_new, user.E_LoudMessage_Volume) + if err != nil { + t.Fatal("Failed getting extension:", err) + } + if v := vol_out.(*uint32); *v != *vol { + t.Errorf("vol_out = %v, expected %v", *v, *vol) + } + proto.ClearExtension(bm_new, user.E_LoudMessage_Volume) + if proto.HasExtension(bm_new, user.E_LoudMessage_Volume) { + t.Fatal("Failed clearing extension.") + } +} + +func TestMessageExtension(t *testing.T) { + bm := &base.BaseMessage{ + Height: proto.Int32(179), + } + + // Use extension that is itself a message. 
+ um := &user.UserMessage{ + Name: proto.String("Dave"), + Rank: proto.String("Major"), + } + err := proto.SetExtension(bm, user.E_LoginMessage_UserMessage, um) + if err != nil { + t.Fatal("Failed setting extension:", err) + } + buf, err := proto.Marshal(bm) + if err != nil { + t.Fatal("Failed encoding message with extension:", err) + } + bm_new := new(base.BaseMessage) + if err := proto.Unmarshal(buf, bm_new); err != nil { + t.Fatal("Failed decoding message with extension:", err) + } + if !proto.HasExtension(bm_new, user.E_LoginMessage_UserMessage) { + t.Fatal("Decoded message didn't contain extension.") + } + um_out, err := proto.GetExtension(bm_new, user.E_LoginMessage_UserMessage) + if err != nil { + t.Fatal("Failed getting extension:", err) + } + if n := um_out.(*user.UserMessage).Name; *n != *um.Name { + t.Errorf("um_out.Name = %q, expected %q", *n, *um.Name) + } + if r := um_out.(*user.UserMessage).Rank; *r != *um.Rank { + t.Errorf("um_out.Rank = %q, expected %q", *r, *um.Rank) + } + proto.ClearExtension(bm_new, user.E_LoginMessage_UserMessage) + if proto.HasExtension(bm_new, user.E_LoginMessage_UserMessage) { + t.Fatal("Failed clearing extension.") + } +} + +func TestTopLevelExtension(t *testing.T) { + bm := &base.BaseMessage{ + Height: proto.Int32(179), + } + + width := proto.Int32(17) + err := proto.SetExtension(bm, user.E_Width, width) + if err != nil { + t.Fatal("Failed setting extension:", err) + } + buf, err := proto.Marshal(bm) + if err != nil { + t.Fatal("Failed encoding message with extension:", err) + } + bm_new := new(base.BaseMessage) + if err := proto.Unmarshal(buf, bm_new); err != nil { + t.Fatal("Failed decoding message with extension:", err) + } + if !proto.HasExtension(bm_new, user.E_Width) { + t.Fatal("Decoded message didn't contain extension.") + } + width_out, err := proto.GetExtension(bm_new, user.E_Width) + if err != nil { + t.Fatal("Failed getting extension:", err) + } + if w := width_out.(*int32); *w != *width { + t.Errorf("width_out = %v, expected %v", *w, *width) + } + proto.ClearExtension(bm_new, user.E_Width) + if proto.HasExtension(bm_new, user.E_Width) { + t.Fatal("Failed clearing extension.") + } +} + +func TestMessageSetWireFormat(t *testing.T) { + osm := new(base.OldStyleMessage) + osp := &user.OldStyleParcel{ + Name: proto.String("Dave"), + Height: proto.Int32(178), + } + + err := proto.SetExtension(osm, user.E_OldStyleParcel_MessageSetExtension, osp) + if err != nil { + t.Fatal("Failed setting extension:", err) + } + + buf, err := proto.Marshal(osm) + if err != nil { + t.Fatal("Failed encoding message:", err) + } + + // Data generated from Python implementation. + expected := []byte{ + 11, 16, 209, 15, 26, 9, 10, 4, 68, 97, 118, 101, 16, 178, 1, 12, + } + + if !bytes.Equal(expected, buf) { + t.Errorf("Encoding mismatch.\nwant %+v\n got %+v", expected, buf) + } + + // Check that it is restored correctly. 
+ osm = new(base.OldStyleMessage) + if err := proto.Unmarshal(buf, osm); err != nil { + t.Fatal("Failed decoding message:", err) + } + osp_out, err := proto.GetExtension(osm, user.E_OldStyleParcel_MessageSetExtension) + if err != nil { + t.Fatal("Failed getting extension:", err) + } + osp = osp_out.(*user.OldStyleParcel) + if *osp.Name != "Dave" || *osp.Height != 178 { + t.Errorf("Retrieved extension from decoded message is not correct: %+v", osp) + } +} + +func main() { + // simpler than rigging up gotest + testing.Main(regexp.MatchString, []testing.InternalTest{ + {"TestSingleFieldExtension", TestSingleFieldExtension}, + {"TestMessageExtension", TestMessageExtension}, + {"TestTopLevelExtension", TestTopLevelExtension}, + }, + []testing.InternalBenchmark{}, + []testing.InternalExample{}) +} + +*/ diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto new file mode 100644 index 000000000..ff65873dd --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/extension_user.proto @@ -0,0 +1,100 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto2"; + +import "extension_base.proto"; +import "extension_extra.proto"; + +package extension_user; + +message UserMessage { + optional string name = 1; + optional string rank = 2; +} + +// Extend with a message +extend extension_base.BaseMessage { + optional UserMessage user_message = 5; +} + +// Extend with a foreign message +extend extension_base.BaseMessage { + optional extension_extra.ExtraMessage extra_message = 9; +} + +// Extend with some primitive types +extend extension_base.BaseMessage { + optional int32 width = 6; + optional int64 area = 7; +} + +// Extend inside the scope of another type +message LoudMessage { + extend extension_base.BaseMessage { + optional uint32 volume = 8; + } + extensions 100 to max; +} + +// Extend inside the scope of another type, using a message. +message LoginMessage { + extend extension_base.BaseMessage { + optional UserMessage user_message = 16; + } +} + +// Extend with a repeated field +extend extension_base.BaseMessage { + repeated Detail detail = 17; +} + +message Detail { + optional string color = 1; +} + +// An extension of an extension +message Announcement { + optional string words = 1; + extend LoudMessage { + optional Announcement loud_ext = 100; + } +} + +// Something that can be put in a message set. +message OldStyleParcel { + extend extension_base.OldStyleMessage { + optional OldStyleParcel message_set_extension = 2001; + } + + required string name = 1; + optional int32 height = 2; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto new file mode 100644 index 000000000..b8bc41acd --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/grpc.proto @@ -0,0 +1,59 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package grpc.testing; + +message SimpleRequest { +} + +message SimpleResponse { +} + +message StreamMsg { +} + +message StreamMsg2 { +} + +service Test { + rpc UnaryCall(SimpleRequest) returns (SimpleResponse); + + // This RPC streams from the server only. + rpc Downstream(SimpleRequest) returns (stream StreamMsg); + + // This RPC streams from the client. + rpc Upstream(stream StreamMsg) returns (SimpleResponse); + + // This one streams in both directions. + rpc Bidi(stream StreamMsg) returns (stream StreamMsg2); +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden new file mode 100644 index 000000000..784a4f865 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.pb.go.golden @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. +// source: imp.proto +// DO NOT EDIT! + +package imp + +import proto "github.com/golang/protobuf/proto" +import "math" +import "os" +import imp1 "imp2.pb" + +// Reference proto & math imports to suppress error if they are not otherwise used. +var _ = proto.GetString +var _ = math.Inf + +// Types from public import imp2.proto +type PubliclyImportedMessage imp1.PubliclyImportedMessage + +func (this *PubliclyImportedMessage) Reset() { (*imp1.PubliclyImportedMessage)(this).Reset() } +func (this *PubliclyImportedMessage) String() string { + return (*imp1.PubliclyImportedMessage)(this).String() +} + +// PubliclyImportedMessage from public import imp.proto + +type ImportedMessage_Owner int32 + +const ( + ImportedMessage_DAVE ImportedMessage_Owner = 1 + ImportedMessage_MIKE ImportedMessage_Owner = 2 +) + +var ImportedMessage_Owner_name = map[int32]string{ + 1: "DAVE", + 2: "MIKE", +} +var ImportedMessage_Owner_value = map[string]int32{ + "DAVE": 1, + "MIKE": 2, +} + +// NewImportedMessage_Owner is deprecated. Use x.Enum() instead. 
+func NewImportedMessage_Owner(x ImportedMessage_Owner) *ImportedMessage_Owner { + e := ImportedMessage_Owner(x) + return &e +} +func (x ImportedMessage_Owner) Enum() *ImportedMessage_Owner { + p := new(ImportedMessage_Owner) + *p = x + return p +} +func (x ImportedMessage_Owner) String() string { + return proto.EnumName(ImportedMessage_Owner_name, int32(x)) +} + +type ImportedMessage struct { + Field *int64 `protobuf:"varint,1,req,name=field" json:"field,omitempty"` + XXX_extensions map[int32][]byte `json:",omitempty"` + XXX_unrecognized []byte `json:",omitempty"` +} + +func (this *ImportedMessage) Reset() { *this = ImportedMessage{} } +func (this *ImportedMessage) String() string { return proto.CompactTextString(this) } + +var extRange_ImportedMessage = []proto.ExtensionRange{ + proto.ExtensionRange{90, 100}, +} + +func (*ImportedMessage) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ImportedMessage +} +func (this *ImportedMessage) ExtensionMap() map[int32][]byte { + if this.XXX_extensions == nil { + this.XXX_extensions = make(map[int32][]byte) + } + return this.XXX_extensions +} + +type ImportedExtendable struct { + XXX_extensions map[int32][]byte `json:",omitempty"` + XXX_unrecognized []byte `json:",omitempty"` +} + +func (this *ImportedExtendable) Reset() { *this = ImportedExtendable{} } +func (this *ImportedExtendable) String() string { return proto.CompactTextString(this) } + +func (this *ImportedExtendable) Marshal() ([]byte, error) { + return proto.MarshalMessageSet(this.ExtensionMap()) +} +func (this *ImportedExtendable) Unmarshal(buf []byte) error { + return proto.UnmarshalMessageSet(buf, this.ExtensionMap()) +} +// ensure ImportedExtendable satisfies proto.Marshaler and proto.Unmarshaler +var _ proto.Marshaler = (*ImportedExtendable)(nil) +var _ proto.Unmarshaler = (*ImportedExtendable)(nil) + +var extRange_ImportedExtendable = []proto.ExtensionRange{ + proto.ExtensionRange{100, 536870911}, +} + +func (*ImportedExtendable) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ImportedExtendable +} +func (this *ImportedExtendable) ExtensionMap() map[int32][]byte { + if this.XXX_extensions == nil { + this.XXX_extensions = make(map[int32][]byte) + } + return this.XXX_extensions +} + +func init() { + proto.RegisterEnum("imp.ImportedMessage_Owner", ImportedMessage_Owner_name, ImportedMessage_Owner_value) +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto new file mode 100644 index 000000000..156e078d1 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp.proto @@ -0,0 +1,70 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package imp; + +import "imp2.proto"; +import "imp3.proto"; + +message ImportedMessage { + required int64 field = 1; + + // The forwarded getters for these fields are fiddly to get right. + optional ImportedMessage2 local_msg = 2; + optional ForeignImportedMessage foreign_msg = 3; // in imp3.proto + optional Owner enum_field = 4; + oneof union { + int32 state = 9; + } + + repeated string name = 5; + repeated Owner boss = 6; + repeated ImportedMessage2 memo = 7; + + map<string, ImportedMessage2> msg_map = 8; + + enum Owner { + DAVE = 1; + MIKE = 2; + } + + extensions 90 to 100; +} + +message ImportedMessage2 { +} + +message ImportedExtendable { + option message_set_wire_format = true; + extensions 100 to max; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto new file mode 100644 index 000000000..3bb0632b2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp2.proto @@ -0,0 +1,43 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2011 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package imp; + +message PubliclyImportedMessage { + optional int64 field = 1; +} + +enum PubliclyImportedEnum { + GLASSES = 1; + HAIR = 2; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto new file mode 100644 index 000000000..58fc7598b --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/imp3.proto @@ -0,0 +1,38 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2012 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package imp; + +message ForeignImportedMessage { + optional string tuber = 1; +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go new file mode 100644 index 000000000..f9b5ccf20 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/main_test.go @@ -0,0 +1,46 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// A simple binary to link together the protocol buffers in this test. + +package testdata + +import ( + "testing" + + mytestpb "./my_test" + multipb "github.com/golang/protobuf/protoc-gen-go/testdata/multi" +) + +func TestLink(t *testing.T) { + _ = &multipb.Multi1{} + _ = &mytestpb.Request{} +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto new file mode 100644 index 000000000..0da6e0af4 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi1.proto @@ -0,0 +1,44 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +import "multi/multi2.proto"; +import "multi/multi3.proto"; + +package multitest; + +message Multi1 { + required Multi2 multi2 = 1; + optional Multi2.Color color = 2; + optional Multi3.HatType hat_type = 3; +} + diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto new file mode 100644 index 000000000..e6bfc71b3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi2.proto @@ -0,0 +1,46 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package multitest; + +message Multi2 { + required int32 required_value = 1; + + enum Color { + BLUE = 1; + GREEN = 2; + RED = 3; + }; + optional Color color = 2; +} + diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto new file mode 100644 index 000000000..146c255bd --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/multi/multi3.proto @@ -0,0 +1,43 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +package multitest; + +message Multi3 { + enum HatType { + FEDORA = 1; + FEZ = 2; + }; + optional HatType hat_type = 1; +} + diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go new file mode 100644 index 000000000..1954e3fb7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go @@ -0,0 +1,870 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: my_test/test.proto + +/* +Package my_test is a generated protocol buffer package. + +This package holds interesting messages. + +It is generated from these files: + my_test/test.proto + +It has these top-level messages: + Request + Reply + OtherBase + ReplyExtensions + OtherReplyExtensions + OldReply + Communique +*/ +package my_test + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type HatType int32 + +const ( + // deliberately skipping 0 + HatType_FEDORA HatType = 1 + HatType_FEZ HatType = 2 +) + +var HatType_name = map[int32]string{ + 1: "FEDORA", + 2: "FEZ", +} +var HatType_value = map[string]int32{ + "FEDORA": 1, + "FEZ": 2, +} + +func (x HatType) Enum() *HatType { + p := new(HatType) + *p = x + return p +} +func (x HatType) String() string { + return proto.EnumName(HatType_name, int32(x)) +} +func (x *HatType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(HatType_value, data, "HatType") + if err != nil { + return err + } + *x = HatType(value) + return nil +} + +// This enum represents days of the week. +type Days int32 + +const ( + Days_MONDAY Days = 1 + Days_TUESDAY Days = 2 + Days_LUNDI Days = 1 +) + +var Days_name = map[int32]string{ + 1: "MONDAY", + 2: "TUESDAY", + // Duplicate value: 1: "LUNDI", +} +var Days_value = map[string]int32{ + "MONDAY": 1, + "TUESDAY": 2, + "LUNDI": 1, +} + +func (x Days) Enum() *Days { + p := new(Days) + *p = x + return p +} +func (x Days) String() string { + return proto.EnumName(Days_name, int32(x)) +} +func (x *Days) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Days_value, data, "Days") + if err != nil { + return err + } + *x = Days(value) + return nil +} + +type Request_Color int32 + +const ( + Request_RED Request_Color = 0 + Request_GREEN Request_Color = 1 + Request_BLUE Request_Color = 2 +) + +var Request_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var Request_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x Request_Color) Enum() *Request_Color { + p := new(Request_Color) + *p = x + return p +} +func (x Request_Color) String() string { + return proto.EnumName(Request_Color_name, int32(x)) +} +func (x *Request_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Request_Color_value, data, "Request_Color") + if err != nil { + return err + } + *x = Request_Color(value) + return nil +} + +type Reply_Entry_Game int32 + +const ( + Reply_Entry_FOOTBALL Reply_Entry_Game = 1 + Reply_Entry_TENNIS Reply_Entry_Game = 2 +) + +var Reply_Entry_Game_name = map[int32]string{ + 1: "FOOTBALL", + 2: "TENNIS", +} +var Reply_Entry_Game_value = map[string]int32{ + "FOOTBALL": 1, + "TENNIS": 2, +} + +func (x Reply_Entry_Game) Enum() *Reply_Entry_Game { + p := new(Reply_Entry_Game) + *p = x + return p +} +func (x Reply_Entry_Game) String() string { + return proto.EnumName(Reply_Entry_Game_name, int32(x)) +} +func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Reply_Entry_Game_value, data, "Reply_Entry_Game") + if err != nil { + return err + } + *x = Reply_Entry_Game(value) + return nil +} + +// This is a message that might be sent somewhere. +type Request struct { + Key []int64 `protobuf:"varint,1,rep,name=key" json:"key,omitempty"` + // optional imp.ImportedMessage imported_message = 2; + Hue *Request_Color `protobuf:"varint,3,opt,name=hue,enum=my.test.Request_Color" json:"hue,omitempty"` + Hat *HatType `protobuf:"varint,4,opt,name=hat,enum=my.test.HatType,def=1" json:"hat,omitempty"` + // optional imp.ImportedMessage.Owner owner = 6; + Deadline *float32 `protobuf:"fixed32,7,opt,name=deadline,def=inf" json:"deadline,omitempty"` + Somegroup *Request_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` + // This is a map field. 
It will generate map[int32]string. + NameMapping map[int32]string `protobuf:"bytes,14,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // This is a map field whose value type is a message. + MsgMapping map[int64]*Reply `protobuf:"bytes,15,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Reset_ *int32 `protobuf:"varint,12,opt,name=reset" json:"reset,omitempty"` + // This field should not conflict with any getters. + GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} + +const Default_Request_Hat HatType = HatType_FEDORA + +var Default_Request_Deadline float32 = float32(math.Inf(1)) + +func (m *Request) GetKey() []int64 { + if m != nil { + return m.Key + } + return nil +} + +func (m *Request) GetHue() Request_Color { + if m != nil && m.Hue != nil { + return *m.Hue + } + return Request_RED +} + +func (m *Request) GetHat() HatType { + if m != nil && m.Hat != nil { + return *m.Hat + } + return Default_Request_Hat +} + +func (m *Request) GetDeadline() float32 { + if m != nil && m.Deadline != nil { + return *m.Deadline + } + return Default_Request_Deadline +} + +func (m *Request) GetSomegroup() *Request_SomeGroup { + if m != nil { + return m.Somegroup + } + return nil +} + +func (m *Request) GetNameMapping() map[int32]string { + if m != nil { + return m.NameMapping + } + return nil +} + +func (m *Request) GetMsgMapping() map[int64]*Reply { + if m != nil { + return m.MsgMapping + } + return nil +} + +func (m *Request) GetReset_() int32 { + if m != nil && m.Reset_ != nil { + return *m.Reset_ + } + return 0 +} + +func (m *Request) GetGetKey_() string { + if m != nil && m.GetKey_ != nil { + return *m.GetKey_ + } + return "" +} + +type Request_SomeGroup struct { + GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Request_SomeGroup) Reset() { *m = Request_SomeGroup{} } +func (m *Request_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*Request_SomeGroup) ProtoMessage() {} + +func (m *Request_SomeGroup) GetGroupField() int32 { + if m != nil && m.GroupField != nil { + return *m.GroupField + } + return 0 +} + +type Reply struct { + Found []*Reply_Entry `protobuf:"bytes,1,rep,name=found" json:"found,omitempty"` + CompactKeys []int32 `protobuf:"varint,2,rep,packed,name=compact_keys,json=compactKeys" json:"compact_keys,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Reply) Reset() { *m = Reply{} } +func (m *Reply) String() string { return proto.CompactTextString(m) } +func (*Reply) ProtoMessage() {} + +var extRange_Reply = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*Reply) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_Reply +} + +func (m *Reply) GetFound() []*Reply_Entry { + if m != nil { + return m.Found + } + return nil +} + +func (m *Reply) GetCompactKeys() []int32 { + if m != nil { + return m.CompactKeys + } + return nil +} + +type Reply_Entry struct { + KeyThatNeeds_1234Camel_CasIng *int64 
`protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` + Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` + XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Reply_Entry) Reset() { *m = Reply_Entry{} } +func (m *Reply_Entry) String() string { return proto.CompactTextString(m) } +func (*Reply_Entry) ProtoMessage() {} + +const Default_Reply_Entry_Value int64 = 7 + +func (m *Reply_Entry) GetKeyThatNeeds_1234Camel_CasIng() int64 { + if m != nil && m.KeyThatNeeds_1234Camel_CasIng != nil { + return *m.KeyThatNeeds_1234Camel_CasIng + } + return 0 +} + +func (m *Reply_Entry) GetValue() int64 { + if m != nil && m.Value != nil { + return *m.Value + } + return Default_Reply_Entry_Value +} + +func (m *Reply_Entry) GetXMyFieldName_2() int64 { + if m != nil && m.XMyFieldName_2 != nil { + return *m.XMyFieldName_2 + } + return 0 +} + +type OtherBase struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OtherBase) Reset() { *m = OtherBase{} } +func (m *OtherBase) String() string { return proto.CompactTextString(m) } +func (*OtherBase) ProtoMessage() {} + +var extRange_OtherBase = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*OtherBase) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OtherBase +} + +func (m *OtherBase) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +type ReplyExtensions struct { + XXX_unrecognized []byte `json:"-"` +} + +func (m *ReplyExtensions) Reset() { *m = ReplyExtensions{} } +func (m *ReplyExtensions) String() string { return proto.CompactTextString(m) } +func (*ReplyExtensions) ProtoMessage() {} + +var E_ReplyExtensions_Time = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*float64)(nil), + Field: 101, + Name: "my.test.ReplyExtensions.time", + Tag: "fixed64,101,opt,name=time", + Filename: "my_test/test.proto", +} + +var E_ReplyExtensions_Carrot = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*ReplyExtensions)(nil), + Field: 105, + Name: "my.test.ReplyExtensions.carrot", + Tag: "bytes,105,opt,name=carrot", + Filename: "my_test/test.proto", +} + +var E_ReplyExtensions_Donut = &proto.ExtensionDesc{ + ExtendedType: (*OtherBase)(nil), + ExtensionType: (*ReplyExtensions)(nil), + Field: 101, + Name: "my.test.ReplyExtensions.donut", + Tag: "bytes,101,opt,name=donut", + Filename: "my_test/test.proto", +} + +type OtherReplyExtensions struct { + Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OtherReplyExtensions) Reset() { *m = OtherReplyExtensions{} } +func (m *OtherReplyExtensions) String() string { return proto.CompactTextString(m) } +func (*OtherReplyExtensions) ProtoMessage() {} + +func (m *OtherReplyExtensions) GetKey() int32 { + if m != nil && m.Key != nil { + return *m.Key + } + return 0 +} + +type OldReply struct { + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OldReply) Reset() { *m = OldReply{} } +func (m *OldReply) String() string { return proto.CompactTextString(m) } +func (*OldReply) ProtoMessage() {} + +func (m *OldReply) Marshal() ([]byte, error) { + return 
proto.MarshalMessageSet(&m.XXX_InternalExtensions) +} +func (m *OldReply) Unmarshal(buf []byte) error { + return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) +} +func (m *OldReply) MarshalJSON() ([]byte, error) { + return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) +} +func (m *OldReply) UnmarshalJSON(buf []byte) error { + return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) +} + +// ensure OldReply satisfies proto.Marshaler and proto.Unmarshaler +var _ proto.Marshaler = (*OldReply)(nil) +var _ proto.Unmarshaler = (*OldReply)(nil) + +var extRange_OldReply = []proto.ExtensionRange{ + {100, 2147483646}, +} + +func (*OldReply) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OldReply +} + +type Communique struct { + MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` + // This is a oneof, called "union". + // + // Types that are valid to be assigned to Union: + // *Communique_Number + // *Communique_Name + // *Communique_Data + // *Communique_TempC + // *Communique_Height + // *Communique_Today + // *Communique_Maybe + // *Communique_Delta_ + // *Communique_Msg + // *Communique_Somegroup + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique) Reset() { *m = Communique{} } +func (m *Communique) String() string { return proto.CompactTextString(m) } +func (*Communique) ProtoMessage() {} + +type isCommunique_Union interface { + isCommunique_Union() +} + +type Communique_Number struct { + Number int32 `protobuf:"varint,5,opt,name=number,oneof"` +} +type Communique_Name struct { + Name string `protobuf:"bytes,6,opt,name=name,oneof"` +} +type Communique_Data struct { + Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` +} +type Communique_TempC struct { + TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` +} +type Communique_Height struct { + Height float32 `protobuf:"fixed32,9,opt,name=height,oneof"` +} +type Communique_Today struct { + Today Days `protobuf:"varint,10,opt,name=today,enum=my.test.Days,oneof"` +} +type Communique_Maybe struct { + Maybe bool `protobuf:"varint,11,opt,name=maybe,oneof"` +} +type Communique_Delta_ struct { + Delta int32 `protobuf:"zigzag32,12,opt,name=delta,oneof"` +} +type Communique_Msg struct { + Msg *Reply `protobuf:"bytes,13,opt,name=msg,oneof"` +} +type Communique_Somegroup struct { + Somegroup *Communique_SomeGroup `protobuf:"group,14,opt,name=SomeGroup,json=somegroup,oneof"` +} + +func (*Communique_Number) isCommunique_Union() {} +func (*Communique_Name) isCommunique_Union() {} +func (*Communique_Data) isCommunique_Union() {} +func (*Communique_TempC) isCommunique_Union() {} +func (*Communique_Height) isCommunique_Union() {} +func (*Communique_Today) isCommunique_Union() {} +func (*Communique_Maybe) isCommunique_Union() {} +func (*Communique_Delta_) isCommunique_Union() {} +func (*Communique_Msg) isCommunique_Union() {} +func (*Communique_Somegroup) isCommunique_Union() {} + +func (m *Communique) GetUnion() isCommunique_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *Communique) GetMakeMeCry() bool { + if m != nil && m.MakeMeCry != nil { + return *m.MakeMeCry + } + return false +} + +func (m *Communique) GetNumber() int32 { + if x, ok := m.GetUnion().(*Communique_Number); ok { + return x.Number + } + return 0 +} + +func (m *Communique) GetName() string { + if x, ok := m.GetUnion().(*Communique_Name); ok { + return x.Name + } + return "" +} + +func (m 
*Communique) GetData() []byte { + if x, ok := m.GetUnion().(*Communique_Data); ok { + return x.Data + } + return nil +} + +func (m *Communique) GetTempC() float64 { + if x, ok := m.GetUnion().(*Communique_TempC); ok { + return x.TempC + } + return 0 +} + +func (m *Communique) GetHeight() float32 { + if x, ok := m.GetUnion().(*Communique_Height); ok { + return x.Height + } + return 0 +} + +func (m *Communique) GetToday() Days { + if x, ok := m.GetUnion().(*Communique_Today); ok { + return x.Today + } + return Days_MONDAY +} + +func (m *Communique) GetMaybe() bool { + if x, ok := m.GetUnion().(*Communique_Maybe); ok { + return x.Maybe + } + return false +} + +func (m *Communique) GetDelta() int32 { + if x, ok := m.GetUnion().(*Communique_Delta_); ok { + return x.Delta + } + return 0 +} + +func (m *Communique) GetMsg() *Reply { + if x, ok := m.GetUnion().(*Communique_Msg); ok { + return x.Msg + } + return nil +} + +func (m *Communique) GetSomegroup() *Communique_SomeGroup { + if x, ok := m.GetUnion().(*Communique_Somegroup); ok { + return x.Somegroup + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ + (*Communique_Number)(nil), + (*Communique_Name)(nil), + (*Communique_Data)(nil), + (*Communique_TempC)(nil), + (*Communique_Height)(nil), + (*Communique_Today)(nil), + (*Communique_Maybe)(nil), + (*Communique_Delta_)(nil), + (*Communique_Msg)(nil), + (*Communique_Somegroup)(nil), + } +} + +func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Number)) + case *Communique_Name: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case *Communique_Data: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Data) + case *Communique_TempC: + b.EncodeVarint(8<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.TempC)) + case *Communique_Height: + b.EncodeVarint(9<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(math.Float32bits(x.Height))) + case *Communique_Today: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Today)) + case *Communique_Maybe: + t := uint64(0) + if x.Maybe { + t = 1 + } + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Communique_Delta_: + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeZigzag32(uint64(x.Delta)) + case *Communique_Msg: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Msg); err != nil { + return err + } + case *Communique_Somegroup: + b.EncodeVarint(14<<3 | proto.WireStartGroup) + if err := b.Marshal(x.Somegroup); err != nil { + return err + } + b.EncodeVarint(14<<3 | proto.WireEndGroup) + case nil: + default: + return fmt.Errorf("Communique.Union has unexpected type %T", x) + } + return nil +} + +func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Communique) + switch tag { + case 5: // union.number + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Number{int32(x)} + return true, err + case 6: // 
union.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &Communique_Name{x} + return true, err + case 7: // union.data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Union = &Communique_Data{x} + return true, err + case 8: // union.temp_c + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Communique_TempC{math.Float64frombits(x)} + return true, err + case 9: // union.height + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Union = &Communique_Height{math.Float32frombits(uint32(x))} + return true, err + case 10: // union.today + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Today{Days(x)} + return true, err + case 11: // union.maybe + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Maybe{x != 0} + return true, err + case 12: // union.delta + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeZigzag32() + m.Union = &Communique_Delta_{int32(x)} + return true, err + case 13: // union.msg + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reply) + err := b.DecodeMessage(msg) + m.Union = &Communique_Msg{msg} + return true, err + case 14: // union.somegroup + if wire != proto.WireStartGroup { + return true, proto.ErrInternalBadWireType + } + msg := new(Communique_SomeGroup) + err := b.DecodeGroup(msg) + m.Union = &Communique_Somegroup{msg} + return true, err + default: + return false, nil + } +} + +func _Communique_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + n += proto.SizeVarint(5<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Number)) + case *Communique_Name: + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case *Communique_Data: + n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Data))) + n += len(x.Data) + case *Communique_TempC: + n += proto.SizeVarint(8<<3 | proto.WireFixed64) + n += 8 + case *Communique_Height: + n += proto.SizeVarint(9<<3 | proto.WireFixed32) + n += 4 + case *Communique_Today: + n += proto.SizeVarint(10<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Today)) + case *Communique_Maybe: + n += proto.SizeVarint(11<<3 | proto.WireVarint) + n += 1 + case *Communique_Delta_: + n += proto.SizeVarint(12<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64((uint32(x.Delta) << 1) ^ uint32((int32(x.Delta) >> 31)))) + case *Communique_Msg: + s := proto.Size(x.Msg) + n += proto.SizeVarint(13<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Communique_Somegroup: + n += proto.SizeVarint(14<<3 | proto.WireStartGroup) + n += proto.Size(x.Somegroup) + n += proto.SizeVarint(14<<3 | proto.WireEndGroup) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Communique_SomeGroup struct { + Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique_SomeGroup) Reset() { *m = Communique_SomeGroup{} 
} +func (m *Communique_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*Communique_SomeGroup) ProtoMessage() {} + +func (m *Communique_SomeGroup) GetMember() string { + if m != nil && m.Member != nil { + return *m.Member + } + return "" +} + +type Communique_Delta struct { + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique_Delta) Reset() { *m = Communique_Delta{} } +func (m *Communique_Delta) String() string { return proto.CompactTextString(m) } +func (*Communique_Delta) ProtoMessage() {} + +var E_Tag = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*string)(nil), + Field: 103, + Name: "my.test.tag", + Tag: "bytes,103,opt,name=tag", + Filename: "my_test/test.proto", +} + +var E_Donut = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*OtherReplyExtensions)(nil), + Field: 106, + Name: "my.test.donut", + Tag: "bytes,106,opt,name=donut", + Filename: "my_test/test.proto", +} + +func init() { + proto.RegisterType((*Request)(nil), "my.test.Request") + proto.RegisterType((*Request_SomeGroup)(nil), "my.test.Request.SomeGroup") + proto.RegisterType((*Reply)(nil), "my.test.Reply") + proto.RegisterType((*Reply_Entry)(nil), "my.test.Reply.Entry") + proto.RegisterType((*OtherBase)(nil), "my.test.OtherBase") + proto.RegisterType((*ReplyExtensions)(nil), "my.test.ReplyExtensions") + proto.RegisterType((*OtherReplyExtensions)(nil), "my.test.OtherReplyExtensions") + proto.RegisterType((*OldReply)(nil), "my.test.OldReply") + proto.RegisterType((*Communique)(nil), "my.test.Communique") + proto.RegisterType((*Communique_SomeGroup)(nil), "my.test.Communique.SomeGroup") + proto.RegisterType((*Communique_Delta)(nil), "my.test.Communique.Delta") + proto.RegisterEnum("my.test.HatType", HatType_name, HatType_value) + proto.RegisterEnum("my.test.Days", Days_name, Days_value) + proto.RegisterEnum("my.test.Request_Color", Request_Color_name, Request_Color_value) + proto.RegisterEnum("my.test.Reply_Entry_Game", Reply_Entry_Game_name, Reply_Entry_Game_value) + proto.RegisterExtension(E_ReplyExtensions_Time) + proto.RegisterExtension(E_ReplyExtensions_Carrot) + proto.RegisterExtension(E_ReplyExtensions_Donut) + proto.RegisterExtension(E_Tag) + proto.RegisterExtension(E_Donut) +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden new file mode 100644 index 000000000..1954e3fb7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.pb.go.golden @@ -0,0 +1,870 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: my_test/test.proto + +/* +Package my_test is a generated protocol buffer package. + +This package holds interesting messages. + +It is generated from these files: + my_test/test.proto + +It has these top-level messages: + Request + Reply + OtherBase + ReplyExtensions + OtherReplyExtensions + OldReply + Communique +*/ +package my_test + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/protoc-gen-go/testdata/multi" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type HatType int32 + +const ( + // deliberately skipping 0 + HatType_FEDORA HatType = 1 + HatType_FEZ HatType = 2 +) + +var HatType_name = map[int32]string{ + 1: "FEDORA", + 2: "FEZ", +} +var HatType_value = map[string]int32{ + "FEDORA": 1, + "FEZ": 2, +} + +func (x HatType) Enum() *HatType { + p := new(HatType) + *p = x + return p +} +func (x HatType) String() string { + return proto.EnumName(HatType_name, int32(x)) +} +func (x *HatType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(HatType_value, data, "HatType") + if err != nil { + return err + } + *x = HatType(value) + return nil +} + +// This enum represents days of the week. +type Days int32 + +const ( + Days_MONDAY Days = 1 + Days_TUESDAY Days = 2 + Days_LUNDI Days = 1 +) + +var Days_name = map[int32]string{ + 1: "MONDAY", + 2: "TUESDAY", + // Duplicate value: 1: "LUNDI", +} +var Days_value = map[string]int32{ + "MONDAY": 1, + "TUESDAY": 2, + "LUNDI": 1, +} + +func (x Days) Enum() *Days { + p := new(Days) + *p = x + return p +} +func (x Days) String() string { + return proto.EnumName(Days_name, int32(x)) +} +func (x *Days) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Days_value, data, "Days") + if err != nil { + return err + } + *x = Days(value) + return nil +} + +type Request_Color int32 + +const ( + Request_RED Request_Color = 0 + Request_GREEN Request_Color = 1 + Request_BLUE Request_Color = 2 +) + +var Request_Color_name = map[int32]string{ + 0: "RED", + 1: "GREEN", + 2: "BLUE", +} +var Request_Color_value = map[string]int32{ + "RED": 0, + "GREEN": 1, + "BLUE": 2, +} + +func (x Request_Color) Enum() *Request_Color { + p := new(Request_Color) + *p = x + return p +} +func (x Request_Color) String() string { + return proto.EnumName(Request_Color_name, int32(x)) +} +func (x *Request_Color) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Request_Color_value, data, "Request_Color") + if err != nil { + return err + } + *x = Request_Color(value) + return nil +} + +type Reply_Entry_Game int32 + +const ( + Reply_Entry_FOOTBALL Reply_Entry_Game = 1 + Reply_Entry_TENNIS Reply_Entry_Game = 2 +) + +var Reply_Entry_Game_name = map[int32]string{ + 1: "FOOTBALL", + 2: "TENNIS", +} +var Reply_Entry_Game_value = map[string]int32{ + "FOOTBALL": 1, + "TENNIS": 2, +} + +func (x Reply_Entry_Game) Enum() *Reply_Entry_Game { + p := new(Reply_Entry_Game) + *p = x + return p +} +func (x Reply_Entry_Game) String() string { + return proto.EnumName(Reply_Entry_Game_name, int32(x)) +} +func (x *Reply_Entry_Game) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Reply_Entry_Game_value, data, "Reply_Entry_Game") + if err != nil { + return err + } + *x = Reply_Entry_Game(value) + return nil +} + +// This is a message that might be sent somewhere. 
+type Request struct { + Key []int64 `protobuf:"varint,1,rep,name=key" json:"key,omitempty"` + // optional imp.ImportedMessage imported_message = 2; + Hue *Request_Color `protobuf:"varint,3,opt,name=hue,enum=my.test.Request_Color" json:"hue,omitempty"` + Hat *HatType `protobuf:"varint,4,opt,name=hat,enum=my.test.HatType,def=1" json:"hat,omitempty"` + // optional imp.ImportedMessage.Owner owner = 6; + Deadline *float32 `protobuf:"fixed32,7,opt,name=deadline,def=inf" json:"deadline,omitempty"` + Somegroup *Request_SomeGroup `protobuf:"group,8,opt,name=SomeGroup,json=somegroup" json:"somegroup,omitempty"` + // This is a map field. It will generate map[int32]string. + NameMapping map[int32]string `protobuf:"bytes,14,rep,name=name_mapping,json=nameMapping" json:"name_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // This is a map field whose value type is a message. + MsgMapping map[int64]*Reply `protobuf:"bytes,15,rep,name=msg_mapping,json=msgMapping" json:"msg_mapping,omitempty" protobuf_key:"zigzag64,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Reset_ *int32 `protobuf:"varint,12,opt,name=reset" json:"reset,omitempty"` + // This field should not conflict with any getters. + GetKey_ *string `protobuf:"bytes,16,opt,name=get_key,json=getKey" json:"get_key,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} + +const Default_Request_Hat HatType = HatType_FEDORA + +var Default_Request_Deadline float32 = float32(math.Inf(1)) + +func (m *Request) GetKey() []int64 { + if m != nil { + return m.Key + } + return nil +} + +func (m *Request) GetHue() Request_Color { + if m != nil && m.Hue != nil { + return *m.Hue + } + return Request_RED +} + +func (m *Request) GetHat() HatType { + if m != nil && m.Hat != nil { + return *m.Hat + } + return Default_Request_Hat +} + +func (m *Request) GetDeadline() float32 { + if m != nil && m.Deadline != nil { + return *m.Deadline + } + return Default_Request_Deadline +} + +func (m *Request) GetSomegroup() *Request_SomeGroup { + if m != nil { + return m.Somegroup + } + return nil +} + +func (m *Request) GetNameMapping() map[int32]string { + if m != nil { + return m.NameMapping + } + return nil +} + +func (m *Request) GetMsgMapping() map[int64]*Reply { + if m != nil { + return m.MsgMapping + } + return nil +} + +func (m *Request) GetReset_() int32 { + if m != nil && m.Reset_ != nil { + return *m.Reset_ + } + return 0 +} + +func (m *Request) GetGetKey_() string { + if m != nil && m.GetKey_ != nil { + return *m.GetKey_ + } + return "" +} + +type Request_SomeGroup struct { + GroupField *int32 `protobuf:"varint,9,opt,name=group_field,json=groupField" json:"group_field,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Request_SomeGroup) Reset() { *m = Request_SomeGroup{} } +func (m *Request_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*Request_SomeGroup) ProtoMessage() {} + +func (m *Request_SomeGroup) GetGroupField() int32 { + if m != nil && m.GroupField != nil { + return *m.GroupField + } + return 0 +} + +type Reply struct { + Found []*Reply_Entry `protobuf:"bytes,1,rep,name=found" json:"found,omitempty"` + CompactKeys []int32 `protobuf:"varint,2,rep,packed,name=compact_keys,json=compactKeys" json:"compact_keys,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Reply) 
Reset() { *m = Reply{} } +func (m *Reply) String() string { return proto.CompactTextString(m) } +func (*Reply) ProtoMessage() {} + +var extRange_Reply = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*Reply) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_Reply +} + +func (m *Reply) GetFound() []*Reply_Entry { + if m != nil { + return m.Found + } + return nil +} + +func (m *Reply) GetCompactKeys() []int32 { + if m != nil { + return m.CompactKeys + } + return nil +} + +type Reply_Entry struct { + KeyThatNeeds_1234Camel_CasIng *int64 `protobuf:"varint,1,req,name=key_that_needs_1234camel_CasIng,json=keyThatNeeds1234camelCasIng" json:"key_that_needs_1234camel_CasIng,omitempty"` + Value *int64 `protobuf:"varint,2,opt,name=value,def=7" json:"value,omitempty"` + XMyFieldName_2 *int64 `protobuf:"varint,3,opt,name=_my_field_name_2,json=MyFieldName2" json:"_my_field_name_2,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Reply_Entry) Reset() { *m = Reply_Entry{} } +func (m *Reply_Entry) String() string { return proto.CompactTextString(m) } +func (*Reply_Entry) ProtoMessage() {} + +const Default_Reply_Entry_Value int64 = 7 + +func (m *Reply_Entry) GetKeyThatNeeds_1234Camel_CasIng() int64 { + if m != nil && m.KeyThatNeeds_1234Camel_CasIng != nil { + return *m.KeyThatNeeds_1234Camel_CasIng + } + return 0 +} + +func (m *Reply_Entry) GetValue() int64 { + if m != nil && m.Value != nil { + return *m.Value + } + return Default_Reply_Entry_Value +} + +func (m *Reply_Entry) GetXMyFieldName_2() int64 { + if m != nil && m.XMyFieldName_2 != nil { + return *m.XMyFieldName_2 + } + return 0 +} + +type OtherBase struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OtherBase) Reset() { *m = OtherBase{} } +func (m *OtherBase) String() string { return proto.CompactTextString(m) } +func (*OtherBase) ProtoMessage() {} + +var extRange_OtherBase = []proto.ExtensionRange{ + {100, 536870911}, +} + +func (*OtherBase) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OtherBase +} + +func (m *OtherBase) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +type ReplyExtensions struct { + XXX_unrecognized []byte `json:"-"` +} + +func (m *ReplyExtensions) Reset() { *m = ReplyExtensions{} } +func (m *ReplyExtensions) String() string { return proto.CompactTextString(m) } +func (*ReplyExtensions) ProtoMessage() {} + +var E_ReplyExtensions_Time = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*float64)(nil), + Field: 101, + Name: "my.test.ReplyExtensions.time", + Tag: "fixed64,101,opt,name=time", + Filename: "my_test/test.proto", +} + +var E_ReplyExtensions_Carrot = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*ReplyExtensions)(nil), + Field: 105, + Name: "my.test.ReplyExtensions.carrot", + Tag: "bytes,105,opt,name=carrot", + Filename: "my_test/test.proto", +} + +var E_ReplyExtensions_Donut = &proto.ExtensionDesc{ + ExtendedType: (*OtherBase)(nil), + ExtensionType: (*ReplyExtensions)(nil), + Field: 101, + Name: "my.test.ReplyExtensions.donut", + Tag: "bytes,101,opt,name=donut", + Filename: "my_test/test.proto", +} + +type OtherReplyExtensions struct { + Key *int32 `protobuf:"varint,1,opt,name=key" json:"key,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OtherReplyExtensions) Reset() { *m = OtherReplyExtensions{} } +func (m *OtherReplyExtensions) String() 
string { return proto.CompactTextString(m) } +func (*OtherReplyExtensions) ProtoMessage() {} + +func (m *OtherReplyExtensions) GetKey() int32 { + if m != nil && m.Key != nil { + return *m.Key + } + return 0 +} + +type OldReply struct { + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OldReply) Reset() { *m = OldReply{} } +func (m *OldReply) String() string { return proto.CompactTextString(m) } +func (*OldReply) ProtoMessage() {} + +func (m *OldReply) Marshal() ([]byte, error) { + return proto.MarshalMessageSet(&m.XXX_InternalExtensions) +} +func (m *OldReply) Unmarshal(buf []byte) error { + return proto.UnmarshalMessageSet(buf, &m.XXX_InternalExtensions) +} +func (m *OldReply) MarshalJSON() ([]byte, error) { + return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions) +} +func (m *OldReply) UnmarshalJSON(buf []byte) error { + return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions) +} + +// ensure OldReply satisfies proto.Marshaler and proto.Unmarshaler +var _ proto.Marshaler = (*OldReply)(nil) +var _ proto.Unmarshaler = (*OldReply)(nil) + +var extRange_OldReply = []proto.ExtensionRange{ + {100, 2147483646}, +} + +func (*OldReply) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OldReply +} + +type Communique struct { + MakeMeCry *bool `protobuf:"varint,1,opt,name=make_me_cry,json=makeMeCry" json:"make_me_cry,omitempty"` + // This is a oneof, called "union". + // + // Types that are valid to be assigned to Union: + // *Communique_Number + // *Communique_Name + // *Communique_Data + // *Communique_TempC + // *Communique_Height + // *Communique_Today + // *Communique_Maybe + // *Communique_Delta_ + // *Communique_Msg + // *Communique_Somegroup + Union isCommunique_Union `protobuf_oneof:"union"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique) Reset() { *m = Communique{} } +func (m *Communique) String() string { return proto.CompactTextString(m) } +func (*Communique) ProtoMessage() {} + +type isCommunique_Union interface { + isCommunique_Union() +} + +type Communique_Number struct { + Number int32 `protobuf:"varint,5,opt,name=number,oneof"` +} +type Communique_Name struct { + Name string `protobuf:"bytes,6,opt,name=name,oneof"` +} +type Communique_Data struct { + Data []byte `protobuf:"bytes,7,opt,name=data,oneof"` +} +type Communique_TempC struct { + TempC float64 `protobuf:"fixed64,8,opt,name=temp_c,json=tempC,oneof"` +} +type Communique_Height struct { + Height float32 `protobuf:"fixed32,9,opt,name=height,oneof"` +} +type Communique_Today struct { + Today Days `protobuf:"varint,10,opt,name=today,enum=my.test.Days,oneof"` +} +type Communique_Maybe struct { + Maybe bool `protobuf:"varint,11,opt,name=maybe,oneof"` +} +type Communique_Delta_ struct { + Delta int32 `protobuf:"zigzag32,12,opt,name=delta,oneof"` +} +type Communique_Msg struct { + Msg *Reply `protobuf:"bytes,13,opt,name=msg,oneof"` +} +type Communique_Somegroup struct { + Somegroup *Communique_SomeGroup `protobuf:"group,14,opt,name=SomeGroup,json=somegroup,oneof"` +} + +func (*Communique_Number) isCommunique_Union() {} +func (*Communique_Name) isCommunique_Union() {} +func (*Communique_Data) isCommunique_Union() {} +func (*Communique_TempC) isCommunique_Union() {} +func (*Communique_Height) isCommunique_Union() {} +func (*Communique_Today) isCommunique_Union() {} +func (*Communique_Maybe) isCommunique_Union() {} +func (*Communique_Delta_) isCommunique_Union() {} +func (*Communique_Msg) isCommunique_Union() {} +func (*Communique_Somegroup) 
isCommunique_Union() {} + +func (m *Communique) GetUnion() isCommunique_Union { + if m != nil { + return m.Union + } + return nil +} + +func (m *Communique) GetMakeMeCry() bool { + if m != nil && m.MakeMeCry != nil { + return *m.MakeMeCry + } + return false +} + +func (m *Communique) GetNumber() int32 { + if x, ok := m.GetUnion().(*Communique_Number); ok { + return x.Number + } + return 0 +} + +func (m *Communique) GetName() string { + if x, ok := m.GetUnion().(*Communique_Name); ok { + return x.Name + } + return "" +} + +func (m *Communique) GetData() []byte { + if x, ok := m.GetUnion().(*Communique_Data); ok { + return x.Data + } + return nil +} + +func (m *Communique) GetTempC() float64 { + if x, ok := m.GetUnion().(*Communique_TempC); ok { + return x.TempC + } + return 0 +} + +func (m *Communique) GetHeight() float32 { + if x, ok := m.GetUnion().(*Communique_Height); ok { + return x.Height + } + return 0 +} + +func (m *Communique) GetToday() Days { + if x, ok := m.GetUnion().(*Communique_Today); ok { + return x.Today + } + return Days_MONDAY +} + +func (m *Communique) GetMaybe() bool { + if x, ok := m.GetUnion().(*Communique_Maybe); ok { + return x.Maybe + } + return false +} + +func (m *Communique) GetDelta() int32 { + if x, ok := m.GetUnion().(*Communique_Delta_); ok { + return x.Delta + } + return 0 +} + +func (m *Communique) GetMsg() *Reply { + if x, ok := m.GetUnion().(*Communique_Msg); ok { + return x.Msg + } + return nil +} + +func (m *Communique) GetSomegroup() *Communique_SomeGroup { + if x, ok := m.GetUnion().(*Communique_Somegroup); ok { + return x.Somegroup + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Communique) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Communique_OneofMarshaler, _Communique_OneofUnmarshaler, _Communique_OneofSizer, []interface{}{ + (*Communique_Number)(nil), + (*Communique_Name)(nil), + (*Communique_Data)(nil), + (*Communique_TempC)(nil), + (*Communique_Height)(nil), + (*Communique_Today)(nil), + (*Communique_Maybe)(nil), + (*Communique_Delta_)(nil), + (*Communique_Msg)(nil), + (*Communique_Somegroup)(nil), + } +} + +func _Communique_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Number)) + case *Communique_Name: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case *Communique_Data: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Data) + case *Communique_TempC: + b.EncodeVarint(8<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.TempC)) + case *Communique_Height: + b.EncodeVarint(9<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(math.Float32bits(x.Height))) + case *Communique_Today: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Today)) + case *Communique_Maybe: + t := uint64(0) + if x.Maybe { + t = 1 + } + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Communique_Delta_: + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeZigzag32(uint64(x.Delta)) + case *Communique_Msg: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Msg); err != nil { + return err + } + case *Communique_Somegroup: + b.EncodeVarint(14<<3 | proto.WireStartGroup) + if err := b.Marshal(x.Somegroup); 
err != nil { + return err + } + b.EncodeVarint(14<<3 | proto.WireEndGroup) + case nil: + default: + return fmt.Errorf("Communique.Union has unexpected type %T", x) + } + return nil +} + +func _Communique_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Communique) + switch tag { + case 5: // union.number + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Number{int32(x)} + return true, err + case 6: // union.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Union = &Communique_Name{x} + return true, err + case 7: // union.data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Union = &Communique_Data{x} + return true, err + case 8: // union.temp_c + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Union = &Communique_TempC{math.Float64frombits(x)} + return true, err + case 9: // union.height + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Union = &Communique_Height{math.Float32frombits(uint32(x))} + return true, err + case 10: // union.today + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Today{Days(x)} + return true, err + case 11: // union.maybe + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Union = &Communique_Maybe{x != 0} + return true, err + case 12: // union.delta + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeZigzag32() + m.Union = &Communique_Delta_{int32(x)} + return true, err + case 13: // union.msg + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reply) + err := b.DecodeMessage(msg) + m.Union = &Communique_Msg{msg} + return true, err + case 14: // union.somegroup + if wire != proto.WireStartGroup { + return true, proto.ErrInternalBadWireType + } + msg := new(Communique_SomeGroup) + err := b.DecodeGroup(msg) + m.Union = &Communique_Somegroup{msg} + return true, err + default: + return false, nil + } +} + +func _Communique_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Communique) + // union + switch x := m.Union.(type) { + case *Communique_Number: + n += proto.SizeVarint(5<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Number)) + case *Communique_Name: + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case *Communique_Data: + n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Data))) + n += len(x.Data) + case *Communique_TempC: + n += proto.SizeVarint(8<<3 | proto.WireFixed64) + n += 8 + case *Communique_Height: + n += proto.SizeVarint(9<<3 | proto.WireFixed32) + n += 4 + case *Communique_Today: + n += proto.SizeVarint(10<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Today)) + case *Communique_Maybe: + n += proto.SizeVarint(11<<3 | proto.WireVarint) + n += 1 + case *Communique_Delta_: + n += proto.SizeVarint(12<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64((uint32(x.Delta) << 1) ^ uint32((int32(x.Delta) >> 31)))) + case *Communique_Msg: + s := proto.Size(x.Msg) + n += proto.SizeVarint(13<<3 | proto.WireBytes) + n += 
proto.SizeVarint(uint64(s)) + n += s + case *Communique_Somegroup: + n += proto.SizeVarint(14<<3 | proto.WireStartGroup) + n += proto.Size(x.Somegroup) + n += proto.SizeVarint(14<<3 | proto.WireEndGroup) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Communique_SomeGroup struct { + Member *string `protobuf:"bytes,15,opt,name=member" json:"member,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique_SomeGroup) Reset() { *m = Communique_SomeGroup{} } +func (m *Communique_SomeGroup) String() string { return proto.CompactTextString(m) } +func (*Communique_SomeGroup) ProtoMessage() {} + +func (m *Communique_SomeGroup) GetMember() string { + if m != nil && m.Member != nil { + return *m.Member + } + return "" +} + +type Communique_Delta struct { + XXX_unrecognized []byte `json:"-"` +} + +func (m *Communique_Delta) Reset() { *m = Communique_Delta{} } +func (m *Communique_Delta) String() string { return proto.CompactTextString(m) } +func (*Communique_Delta) ProtoMessage() {} + +var E_Tag = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*string)(nil), + Field: 103, + Name: "my.test.tag", + Tag: "bytes,103,opt,name=tag", + Filename: "my_test/test.proto", +} + +var E_Donut = &proto.ExtensionDesc{ + ExtendedType: (*Reply)(nil), + ExtensionType: (*OtherReplyExtensions)(nil), + Field: 106, + Name: "my.test.donut", + Tag: "bytes,106,opt,name=donut", + Filename: "my_test/test.proto", +} + +func init() { + proto.RegisterType((*Request)(nil), "my.test.Request") + proto.RegisterType((*Request_SomeGroup)(nil), "my.test.Request.SomeGroup") + proto.RegisterType((*Reply)(nil), "my.test.Reply") + proto.RegisterType((*Reply_Entry)(nil), "my.test.Reply.Entry") + proto.RegisterType((*OtherBase)(nil), "my.test.OtherBase") + proto.RegisterType((*ReplyExtensions)(nil), "my.test.ReplyExtensions") + proto.RegisterType((*OtherReplyExtensions)(nil), "my.test.OtherReplyExtensions") + proto.RegisterType((*OldReply)(nil), "my.test.OldReply") + proto.RegisterType((*Communique)(nil), "my.test.Communique") + proto.RegisterType((*Communique_SomeGroup)(nil), "my.test.Communique.SomeGroup") + proto.RegisterType((*Communique_Delta)(nil), "my.test.Communique.Delta") + proto.RegisterEnum("my.test.HatType", HatType_name, HatType_value) + proto.RegisterEnum("my.test.Days", Days_name, Days_value) + proto.RegisterEnum("my.test.Request_Color", Request_Color_name, Request_Color_value) + proto.RegisterEnum("my.test.Reply_Entry_Game", Reply_Entry_Game_name, Reply_Entry_Game_value) + proto.RegisterExtension(E_ReplyExtensions_Time) + proto.RegisterExtension(E_ReplyExtensions_Carrot) + proto.RegisterExtension(E_ReplyExtensions_Donut) + proto.RegisterExtension(E_Tag) + proto.RegisterExtension(E_Donut) +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto new file mode 100644 index 000000000..8e7094632 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/my_test/test.proto @@ -0,0 +1,156 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto2"; + +// This package holds interesting messages. +package my.test; // dotted package name + +//import "imp.proto"; +import "multi/multi1.proto"; // unused import + +enum HatType { + // deliberately skipping 0 + FEDORA = 1; + FEZ = 2; +} + +// This enum represents days of the week. +enum Days { + option allow_alias = true; + + MONDAY = 1; + TUESDAY = 2; + LUNDI = 1; // same value as MONDAY +} + +// This is a message that might be sent somewhere. +message Request { + enum Color { + RED = 0; + GREEN = 1; + BLUE = 2; + } + repeated int64 key = 1; +// optional imp.ImportedMessage imported_message = 2; + optional Color hue = 3; // no default + optional HatType hat = 4 [default=FEDORA]; +// optional imp.ImportedMessage.Owner owner = 6; + optional float deadline = 7 [default=inf]; + optional group SomeGroup = 8 { + optional int32 group_field = 9; + } + + // These foreign types are in imp2.proto, + // which is publicly imported by imp.proto. +// optional imp.PubliclyImportedMessage pub = 10; +// optional imp.PubliclyImportedEnum pub_enum = 13 [default=HAIR]; + + + // This is a map field. It will generate map[int32]string. + map<int32, string> name_mapping = 14; + // This is a map field whose value type is a message. + map<sint64, Reply> msg_mapping = 15; + + optional int32 reset = 12; + // This field should not conflict with any getters.
+ optional string get_key = 16; +} + +message Reply { + message Entry { + required int64 key_that_needs_1234camel_CasIng = 1; + optional int64 value = 2 [default=7]; + optional int64 _my_field_name_2 = 3; + enum Game { + FOOTBALL = 1; + TENNIS = 2; + } + } + repeated Entry found = 1; + repeated int32 compact_keys = 2 [packed=true]; + extensions 100 to max; +} + +message OtherBase { + optional string name = 1; + extensions 100 to max; +} + +message ReplyExtensions { + extend Reply { + optional double time = 101; + optional ReplyExtensions carrot = 105; + } + extend OtherBase { + optional ReplyExtensions donut = 101; + } +} + +message OtherReplyExtensions { + optional int32 key = 1; +} + +// top-level extension +extend Reply { + optional string tag = 103; + optional OtherReplyExtensions donut = 106; +// optional imp.ImportedMessage elephant = 107; // extend with message from another file. +} + +message OldReply { + // Extensions will be encoded in MessageSet wire format. + option message_set_wire_format = true; + extensions 100 to max; +} + +message Communique { + optional bool make_me_cry = 1; + + // This is a oneof, called "union". + oneof union { + int32 number = 5; + string name = 6; + bytes data = 7; + double temp_c = 8; + float height = 9; + Days today = 10; + bool maybe = 11; + sint32 delta = 12; // name will conflict with Delta below + Reply msg = 13; + group SomeGroup = 14 { + optional string member = 15; + } + } + + message Delta {} +} + diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto new file mode 100644 index 000000000..869b9af5a --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/testdata/proto3.proto @@ -0,0 +1,53 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2014 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package proto3; + +message Request { + enum Flavour { + SWEET = 0; + SOUR = 1; + UMAMI = 2; + GOPHERLICIOUS = 3; + } + string name = 1; + repeated int64 key = 2; + Flavour taste = 3; + Book book = 4; + repeated int64 unpacked = 5 [packed=false]; +} + +message Book { + string title = 1; + bytes raw_data = 2; +} diff --git a/vendor/github.com/golang/protobuf/ptypes/any.go b/vendor/github.com/golang/protobuf/ptypes/any.go new file mode 100644 index 000000000..b2af97f4a --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/any.go @@ -0,0 +1,139 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package ptypes + +// This file implements functions to marshal proto.Message to/from +// google.protobuf.Any message. + +import ( + "fmt" + "reflect" + "strings" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes/any" +) + +const googleApis = "type.googleapis.com/" + +// AnyMessageName returns the name of the message contained in a google.protobuf.Any message. +// +// Note that regular type assertions should be done using the Is +// function. AnyMessageName is provided for less common use cases like filtering a +// sequence of Any messages based on a set of allowed message type names. +func AnyMessageName(any *any.Any) (string, error) { + if any == nil { + return "", fmt.Errorf("message is nil") + } + slash := strings.LastIndex(any.TypeUrl, "/") + if slash < 0 { + return "", fmt.Errorf("message type url %q is invalid", any.TypeUrl) + } + return any.TypeUrl[slash+1:], nil +} + +// MarshalAny takes the protocol buffer and encodes it into google.protobuf.Any. 
+func MarshalAny(pb proto.Message) (*any.Any, error) { + value, err := proto.Marshal(pb) + if err != nil { + return nil, err + } + return &any.Any{TypeUrl: googleApis + proto.MessageName(pb), Value: value}, nil +} + +// DynamicAny is a value that can be passed to UnmarshalAny to automatically +// allocate a proto.Message for the type specified in a google.protobuf.Any +// message. The allocated message is stored in the embedded proto.Message. +// +// Example: +// +// var x ptypes.DynamicAny +// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... } +// fmt.Printf("unmarshaled message: %v", x.Message) +type DynamicAny struct { + proto.Message +} + +// Empty returns a new proto.Message of the type specified in a +// google.protobuf.Any message. It returns an error if corresponding message +// type isn't linked in. +func Empty(any *any.Any) (proto.Message, error) { + aname, err := AnyMessageName(any) + if err != nil { + return nil, err + } + + t := proto.MessageType(aname) + if t == nil { + return nil, fmt.Errorf("any: message type %q isn't linked in", aname) + } + return reflect.New(t.Elem()).Interface().(proto.Message), nil +} + +// UnmarshalAny parses the protocol buffer representation in a google.protobuf.Any +// message and places the decoded result in pb. It returns an error if type of +// contents of Any message does not match type of pb message. +// +// pb can be a proto.Message, or a *DynamicAny. +func UnmarshalAny(any *any.Any, pb proto.Message) error { + if d, ok := pb.(*DynamicAny); ok { + if d.Message == nil { + var err error + d.Message, err = Empty(any) + if err != nil { + return err + } + } + return UnmarshalAny(any, d.Message) + } + + aname, err := AnyMessageName(any) + if err != nil { + return err + } + + mname := proto.MessageName(pb) + if aname != mname { + return fmt.Errorf("mismatched message type: got %q want %q", aname, mname) + } + return proto.Unmarshal(any.Value, pb) +} + +// Is returns true if any value contains a given message type. +func Is(any *any.Any, pb proto.Message) bool { + aname, err := AnyMessageName(any) + if err != nil { + return false + } + + return aname == proto.MessageName(pb) +} diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go new file mode 100644 index 000000000..f34601723 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go @@ -0,0 +1,178 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/any.proto + +/* +Package any is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/any.proto + +It has these top-level messages: + Any +*/ +package any + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. 
+// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +type Any struct { + // A URL/resource name whose content describes the type of the + // serialized protocol buffer message. + // + // For URLs which use the scheme `http`, `https`, or no scheme, the + // following restrictions and interpretations apply: + // + // * If no scheme is provided, `https` is assumed. + // * The last segment of the URL's path must represent the fully + // qualified name of the type (as in `path/google.protobuf.Duration`). + // The name should be in a canonical form (e.g., leading "." is + // not accepted). + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl" json:"type_url,omitempty"` + // Must be a valid serialized protocol buffer of the above specified type. 
+ Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *Any) Reset() { *m = Any{} } +func (m *Any) String() string { return proto.CompactTextString(m) } +func (*Any) ProtoMessage() {} +func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (*Any) XXX_WellKnownType() string { return "Any" } + +func (m *Any) GetTypeUrl() string { + if m != nil { + return m.TypeUrl + } + return "" +} + +func (m *Any) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Any)(nil), "google.protobuf.Any") +} + +func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 185 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4, + 0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a, + 0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46, + 0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7, + 0x34, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xca, 0xe7, 0x12, 0x4e, 0xce, + 0xcf, 0xd5, 0x43, 0x33, 0xce, 0x89, 0xc3, 0x31, 0xaf, 0x32, 0x00, 0xc4, 0x09, 0x60, 0x8c, 0x52, + 0x4d, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, + 0x4b, 0x47, 0xb8, 0xa8, 0x00, 0x64, 0x7a, 0x31, 0xc8, 0x61, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, + 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x8c, 0x0a, 0x80, 0x2a, 0xd1, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, + 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0x29, 0x4d, 0x62, 0x03, 0xeb, 0x35, 0x06, 0x04, 0x00, 0x00, 0xff, + 0xff, 0x13, 0xf8, 0xe8, 0x42, 0xdd, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.proto b/vendor/github.com/golang/protobuf/ptypes/any/any.proto new file mode 100644 index 000000000..c74866762 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.proto @@ -0,0 +1,149 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "github.com/golang/protobuf/ptypes/any"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := ptypes.MarshalAny(foo) +// ... +// foo := &pb.Foo{} +// if err := ptypes.UnmarshalAny(any, foo); err != nil { +// ... +// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +message Any { + // A URL/resource name whose content describes the type of the + // serialized protocol buffer message. + // + // For URLs which use the scheme `http`, `https`, or no scheme, the + // following restrictions and interpretations apply: + // + // * If no scheme is provided, `https` is assumed. + // * The last segment of the URL's path must represent the fully + // qualified name of the type (as in `path/google.protobuf.Duration`). + // The name should be in a canonical form (e.g., leading "." 
is + // not accepted). + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/vendor/github.com/golang/protobuf/ptypes/any_test.go b/vendor/github.com/golang/protobuf/ptypes/any_test.go new file mode 100644 index 000000000..ed675b489 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/any_test.go @@ -0,0 +1,113 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +package ptypes + +import ( + "testing" + + "github.com/golang/protobuf/proto" + pb "github.com/golang/protobuf/protoc-gen-go/descriptor" + "github.com/golang/protobuf/ptypes/any" +) + +func TestMarshalUnmarshal(t *testing.T) { + orig := &any.Any{Value: []byte("test")} + + packed, err := MarshalAny(orig) + if err != nil { + t.Errorf("MarshalAny(%+v): got: _, %v exp: _, nil", orig, err) + } + + unpacked := &any.Any{} + err = UnmarshalAny(packed, unpacked) + if err != nil || !proto.Equal(unpacked, orig) { + t.Errorf("got: %v, %+v; want nil, %+v", err, unpacked, orig) + } +} + +func TestIs(t *testing.T) { + a, err := MarshalAny(&pb.FileDescriptorProto{}) + if err != nil { + t.Fatal(err) + } + if Is(a, &pb.DescriptorProto{}) { + t.Error("FileDescriptorProto is not a DescriptorProto, but Is says it is") + } + if !Is(a, &pb.FileDescriptorProto{}) { + t.Error("FileDescriptorProto is indeed a FileDescriptorProto, but Is says it is not") + } +} + +func TestIsDifferentUrlPrefixes(t *testing.T) { + m := &pb.FileDescriptorProto{} + a := &any.Any{TypeUrl: "foo/bar/" + proto.MessageName(m)} + if !Is(a, m) { + t.Errorf("message with type url %q didn't satisfy Is for type %q", a.TypeUrl, proto.MessageName(m)) + } +} + +func TestUnmarshalDynamic(t *testing.T) { + want := &pb.FileDescriptorProto{Name: proto.String("foo")} + a, err := MarshalAny(want) + if err != nil { + t.Fatal(err) + } + var got DynamicAny + if err := UnmarshalAny(a, &got); err != nil { + t.Fatal(err) + } + if !proto.Equal(got.Message, want) { + t.Errorf("invalid result from UnmarshalAny, got %q want %q", got.Message, want) + } +} + +func TestEmpty(t *testing.T) { + want := &pb.FileDescriptorProto{} + a, err := MarshalAny(want) + if err != nil { + t.Fatal(err) + } + got, err := Empty(a) + if err != nil { + t.Fatal(err) + } + if !proto.Equal(got, want) { + t.Errorf("unequal empty message, got %q, want %q", got, want) + } + + // that's a valid type_url for a message which shouldn't be linked into this + // test binary. We want an error. + a.TypeUrl = "type.googleapis.com/google.protobuf.FieldMask" + if _, err := Empty(a); err == nil { + t.Errorf("got no error for an attempt to create a message of type %q, which shouldn't be linked in", a.TypeUrl) + } +} diff --git a/vendor/github.com/golang/protobuf/ptypes/doc.go b/vendor/github.com/golang/protobuf/ptypes/doc.go new file mode 100644 index 000000000..c0d595da7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/doc.go @@ -0,0 +1,35 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package ptypes contains code for interacting with well-known types. +*/ +package ptypes diff --git a/vendor/github.com/golang/protobuf/ptypes/duration.go b/vendor/github.com/golang/protobuf/ptypes/duration.go new file mode 100644 index 000000000..65cb0f8eb --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/duration.go @@ -0,0 +1,102 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package ptypes + +// This file implements conversions between google.protobuf.Duration +// and time.Duration. + +import ( + "errors" + "fmt" + "time" + + durpb "github.com/golang/protobuf/ptypes/duration" +) + +const ( + // Range of a durpb.Duration in seconds, as specified in + // google/protobuf/duration.proto. This is about 10,000 years in seconds. + maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60) + minSeconds = -maxSeconds +) + +// validateDuration determines whether the durpb.Duration is valid according to the +// definition in google/protobuf/duration.proto. 
A valid durpb.Duration +// may still be too large to fit into a time.Duration (the range of durpb.Duration +// is about 10,000 years, and the range of time.Duration is about 290). +func validateDuration(d *durpb.Duration) error { + if d == nil { + return errors.New("duration: nil Duration") + } + if d.Seconds < minSeconds || d.Seconds > maxSeconds { + return fmt.Errorf("duration: %v: seconds out of range", d) + } + if d.Nanos <= -1e9 || d.Nanos >= 1e9 { + return fmt.Errorf("duration: %v: nanos out of range", d) + } + // Seconds and Nanos must have the same sign, unless d.Nanos is zero. + if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) { + return fmt.Errorf("duration: %v: seconds and nanos have different signs", d) + } + return nil +} + +// Duration converts a durpb.Duration to a time.Duration. Duration +// returns an error if the durpb.Duration is invalid or is too large to be +// represented in a time.Duration. +func Duration(p *durpb.Duration) (time.Duration, error) { + if err := validateDuration(p); err != nil { + return 0, err + } + d := time.Duration(p.Seconds) * time.Second + if int64(d/time.Second) != p.Seconds { + return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) + } + if p.Nanos != 0 { + d += time.Duration(p.Nanos) + if (d < 0) != (p.Nanos < 0) { + return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) + } + } + return d, nil +} + +// DurationProto converts a time.Duration to a durpb.Duration. +func DurationProto(d time.Duration) *durpb.Duration { + nanos := d.Nanoseconds() + secs := nanos / 1e9 + nanos -= secs * 1e9 + return &durpb.Duration{ + Seconds: secs, + Nanos: int32(nanos), + } +} diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go new file mode 100644 index 000000000..b2410a098 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go @@ -0,0 +1,144 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/duration.proto + +/* +Package duration is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/duration.proto + +It has these top-level messages: + Duration +*/ +package duration + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. 
+// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (durations.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +// +type Duration struct { + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"` + // Signed fractions of a second at nanosecond resolution of the span + // of time. Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. 
+ Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` +} + +func (m *Duration) Reset() { *m = Duration{} } +func (m *Duration) String() string { return proto.CompactTextString(m) } +func (*Duration) ProtoMessage() {} +func (*Duration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (*Duration) XXX_WellKnownType() string { return "Duration" } + +func (m *Duration) GetSeconds() int64 { + if m != nil { + return m.Seconds + } + return 0 +} + +func (m *Duration) GetNanos() int32 { + if m != nil { + return m.Nanos + } + return 0 +} + +func init() { + proto.RegisterType((*Duration)(nil), "google.protobuf.Duration") +} + +func init() { proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 190 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a, + 0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56, + 0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5, + 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e, + 0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0xc3, 0x25, 0x9c, 0x9c, + 0x9f, 0xab, 0x87, 0x66, 0xa4, 0x13, 0x2f, 0xcc, 0xc0, 0x00, 0x90, 0x48, 0x00, 0x63, 0x94, 0x56, + 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, + 0x3a, 0xc2, 0x7d, 0x05, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x70, 0x67, 0xfe, 0x60, 0x64, 0x5c, 0xc4, + 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, 0x00, 0x54, 0xa9, 0x5e, 0x78, + 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, 0x12, 0x1b, 0xd8, 0x0c, 0x63, + 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x84, 0x30, 0xff, 0xf3, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.proto b/vendor/github.com/golang/protobuf/ptypes/duration/duration.proto new file mode 100644 index 000000000..975fce41a --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/duration/duration.proto @@ -0,0 +1,117 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "github.com/golang/protobuf/ptypes/duration"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DurationProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. +// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (durations.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +// +message Duration { + + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + int64 seconds = 1; + + // Signed fractions of a second at nanosecond resolution of the span + // of time. Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. 
For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. + int32 nanos = 2; +} diff --git a/vendor/github.com/golang/protobuf/ptypes/duration_test.go b/vendor/github.com/golang/protobuf/ptypes/duration_test.go new file mode 100644 index 000000000..e00491a34 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/duration_test.go @@ -0,0 +1,121 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package ptypes + +import ( + "math" + "testing" + "time" + + "github.com/golang/protobuf/proto" + durpb "github.com/golang/protobuf/ptypes/duration" +) + +const ( + minGoSeconds = math.MinInt64 / int64(1e9) + maxGoSeconds = math.MaxInt64 / int64(1e9) +) + +var durationTests = []struct { + proto *durpb.Duration + isValid bool + inRange bool + dur time.Duration +}{ + // The zero duration. + {&durpb.Duration{Seconds: 0, Nanos: 0}, true, true, 0}, + // Some ordinary non-zero durations. + {&durpb.Duration{Seconds: 100, Nanos: 0}, true, true, 100 * time.Second}, + {&durpb.Duration{Seconds: -100, Nanos: 0}, true, true, -100 * time.Second}, + {&durpb.Duration{Seconds: 100, Nanos: 987}, true, true, 100*time.Second + 987}, + {&durpb.Duration{Seconds: -100, Nanos: -987}, true, true, -(100*time.Second + 987)}, + // The largest duration representable in Go. + {&durpb.Duration{Seconds: maxGoSeconds, Nanos: int32(math.MaxInt64 - 1e9*maxGoSeconds)}, true, true, math.MaxInt64}, + // The smallest duration representable in Go. 
+ {&durpb.Duration{Seconds: minGoSeconds, Nanos: int32(math.MinInt64 - 1e9*minGoSeconds)}, true, true, math.MinInt64}, + {nil, false, false, 0}, + {&durpb.Duration{Seconds: -100, Nanos: 987}, false, false, 0}, + {&durpb.Duration{Seconds: 100, Nanos: -987}, false, false, 0}, + {&durpb.Duration{Seconds: math.MinInt64, Nanos: 0}, false, false, 0}, + {&durpb.Duration{Seconds: math.MaxInt64, Nanos: 0}, false, false, 0}, + // The largest valid duration. + {&durpb.Duration{Seconds: maxSeconds, Nanos: 1e9 - 1}, true, false, 0}, + // The smallest valid duration. + {&durpb.Duration{Seconds: minSeconds, Nanos: -(1e9 - 1)}, true, false, 0}, + // The smallest invalid duration above the valid range. + {&durpb.Duration{Seconds: maxSeconds + 1, Nanos: 0}, false, false, 0}, + // The largest invalid duration below the valid range. + {&durpb.Duration{Seconds: minSeconds - 1, Nanos: -(1e9 - 1)}, false, false, 0}, + // One nanosecond past the largest duration representable in Go. + {&durpb.Duration{Seconds: maxGoSeconds, Nanos: int32(math.MaxInt64-1e9*maxGoSeconds) + 1}, true, false, 0}, + // One nanosecond past the smallest duration representable in Go. + {&durpb.Duration{Seconds: minGoSeconds, Nanos: int32(math.MinInt64-1e9*minGoSeconds) - 1}, true, false, 0}, + // One second past the largest duration representable in Go. + {&durpb.Duration{Seconds: maxGoSeconds + 1, Nanos: int32(math.MaxInt64 - 1e9*maxGoSeconds)}, true, false, 0}, + // One second past the smallest duration representable in Go. + {&durpb.Duration{Seconds: minGoSeconds - 1, Nanos: int32(math.MinInt64 - 1e9*minGoSeconds)}, true, false, 0}, +} + +func TestValidateDuration(t *testing.T) { + for _, test := range durationTests { + err := validateDuration(test.proto) + gotValid := (err == nil) + if gotValid != test.isValid { + t.Errorf("validateDuration(%v) = %t, want %t", test.proto, gotValid, test.isValid) + } + } +} + +func TestDuration(t *testing.T) { + for _, test := range durationTests { + got, err := Duration(test.proto) + gotOK := (err == nil) + wantOK := test.isValid && test.inRange + if gotOK != wantOK { + t.Errorf("Duration(%v) ok = %t, want %t", test.proto, gotOK, wantOK) + } + if err == nil && got != test.dur { + t.Errorf("Duration(%v) = %v, want %v", test.proto, got, test.dur) + } + } +} + +func TestDurationProto(t *testing.T) { + for _, test := range durationTests { + if test.isValid && test.inRange { + got := DurationProto(test.dur) + if !proto.Equal(got, test.proto) { + t.Errorf("DurationProto(%v) = %v, want %v", test.dur, got, test.proto) + } + } + } +} diff --git a/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go b/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go new file mode 100644 index 000000000..e877b72c3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/empty/empty.pb.go @@ -0,0 +1,66 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/empty.proto + +/* +Package empty is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/empty.proto + +It has these top-level messages: + Empty +*/ +package empty + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +// The JSON representation for `Empty` is empty JSON object `{}`. +type Empty struct { +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (*Empty) XXX_WellKnownType() string { return "Empty" } + +func init() { + proto.RegisterType((*Empty)(nil), "google.protobuf.Empty") +} + +func init() { proto.RegisterFile("google/protobuf/empty.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 148 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcd, 0x2d, 0x28, + 0xa9, 0xd4, 0x03, 0x73, 0x85, 0xf8, 0x21, 0x92, 0x7a, 0x30, 0x49, 0x25, 0x76, 0x2e, 0x56, 0x57, + 0x90, 0xbc, 0x53, 0x19, 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0xbc, 0x13, 0x17, 0x58, 0x36, + 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x52, 0x4f, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, + 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0x47, 0x58, 0x53, 0x50, 0x52, 0x59, 0x90, 0x5a, 0x0c, + 0xb1, 0xed, 0x07, 0x23, 0xe3, 0x22, 0x26, 0x66, 0xf7, 0x00, 0xa7, 0x55, 0x4c, 0x72, 0xee, 0x10, + 0x13, 0x03, 0xa0, 0xea, 0xf4, 0xc2, 0x53, 0x73, 0x72, 0xbc, 0xf3, 0xf2, 0xcb, 0xf3, 0x42, 0x40, + 0xea, 0x93, 0xd8, 0xc0, 0x06, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x64, 0xd4, 0xb3, 0xa6, + 0xb7, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/ptypes/empty/empty.proto b/vendor/github.com/golang/protobuf/ptypes/empty/empty.proto new file mode 100644 index 000000000..03cacd233 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/empty/empty.proto @@ -0,0 +1,52 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "github.com/golang/protobuf/ptypes/empty"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "EmptyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +// The JSON representation for `Empty` is empty JSON object `{}`. +message Empty {} diff --git a/vendor/github.com/golang/protobuf/ptypes/regen.sh b/vendor/github.com/golang/protobuf/ptypes/regen.sh new file mode 100755 index 000000000..b50a9414a --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/regen.sh @@ -0,0 +1,43 @@ +#!/bin/bash -e +# +# This script fetches and rebuilds the "well-known types" protocol buffers. +# To run this you will need protoc and goprotobuf installed; +# see https://github.com/golang/protobuf for instructions. +# You also need Go and Git installed. + +PKG=github.com/golang/protobuf/ptypes +UPSTREAM=https://github.com/google/protobuf +UPSTREAM_SUBDIR=src/google/protobuf +PROTO_FILES=(any duration empty struct timestamp wrappers) + +function die() { + echo 1>&2 $* + exit 1 +} + +# Sanity check that the right tools are accessible. +for tool in go git protoc protoc-gen-go; do + q=$(which $tool) || die "didn't find $tool" + echo 1>&2 "$tool: $q" +done + +tmpdir=$(mktemp -d -t regen-wkt.XXXXXX) +trap 'rm -rf $tmpdir' EXIT + +echo -n 1>&2 "finding package dir... " +pkgdir=$(go list -f '{{.Dir}}' $PKG) +echo 1>&2 $pkgdir +base=$(echo $pkgdir | sed "s,/$PKG\$,,") +echo 1>&2 "base: $base" +cd "$base" + +echo 1>&2 "fetching latest protos... " +git clone -q $UPSTREAM $tmpdir + +for file in ${PROTO_FILES[@]}; do + echo 1>&2 "* $file" + protoc --go_out=. -I$tmpdir/src $tmpdir/src/google/protobuf/$file.proto || die + cp $tmpdir/src/google/protobuf/$file.proto $PKG/$file +done + +echo 1>&2 "All OK" diff --git a/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go new file mode 100644 index 000000000..4cfe60818 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/struct/struct.pb.go @@ -0,0 +1,380 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/struct.proto + +/* +Package structpb is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/struct.proto + +It has these top-level messages: + Struct + Value + ListValue +*/ +package structpb + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +type NullValue int32 + +const ( + // Null value. + NullValue_NULL_VALUE NullValue = 0 +) + +var NullValue_name = map[int32]string{ + 0: "NULL_VALUE", +} +var NullValue_value = map[string]int32{ + "NULL_VALUE": 0, +} + +func (x NullValue) String() string { + return proto.EnumName(NullValue_name, int32(x)) +} +func (NullValue) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (NullValue) XXX_WellKnownType() string { return "NullValue" } + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +type Struct struct { + // Unordered map of dynamically typed values. + Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *Struct) Reset() { *m = Struct{} } +func (m *Struct) String() string { return proto.CompactTextString(m) } +func (*Struct) ProtoMessage() {} +func (*Struct) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (*Struct) XXX_WellKnownType() string { return "Struct" } + +func (m *Struct) GetFields() map[string]*Value { + if m != nil { + return m.Fields + } + return nil +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of that +// variants, absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +type Value struct { + // The kind of value. 
+ // + // Types that are valid to be assigned to Kind: + // *Value_NullValue + // *Value_NumberValue + // *Value_StringValue + // *Value_BoolValue + // *Value_StructValue + // *Value_ListValue + Kind isValue_Kind `protobuf_oneof:"kind"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (*Value) XXX_WellKnownType() string { return "Value" } + +type isValue_Kind interface { + isValue_Kind() +} + +type Value_NullValue struct { + NullValue NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,enum=google.protobuf.NullValue,oneof"` +} +type Value_NumberValue struct { + NumberValue float64 `protobuf:"fixed64,2,opt,name=number_value,json=numberValue,oneof"` +} +type Value_StringValue struct { + StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,oneof"` +} +type Value_BoolValue struct { + BoolValue bool `protobuf:"varint,4,opt,name=bool_value,json=boolValue,oneof"` +} +type Value_StructValue struct { + StructValue *Struct `protobuf:"bytes,5,opt,name=struct_value,json=structValue,oneof"` +} +type Value_ListValue struct { + ListValue *ListValue `protobuf:"bytes,6,opt,name=list_value,json=listValue,oneof"` +} + +func (*Value_NullValue) isValue_Kind() {} +func (*Value_NumberValue) isValue_Kind() {} +func (*Value_StringValue) isValue_Kind() {} +func (*Value_BoolValue) isValue_Kind() {} +func (*Value_StructValue) isValue_Kind() {} +func (*Value_ListValue) isValue_Kind() {} + +func (m *Value) GetKind() isValue_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Value) GetNullValue() NullValue { + if x, ok := m.GetKind().(*Value_NullValue); ok { + return x.NullValue + } + return NullValue_NULL_VALUE +} + +func (m *Value) GetNumberValue() float64 { + if x, ok := m.GetKind().(*Value_NumberValue); ok { + return x.NumberValue + } + return 0 +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetKind().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBoolValue() bool { + if x, ok := m.GetKind().(*Value_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *Value) GetStructValue() *Struct { + if x, ok := m.GetKind().(*Value_StructValue); ok { + return x.StructValue + } + return nil +} + +func (m *Value) GetListValue() *ListValue { + if x, ok := m.GetKind().(*Value_ListValue); ok { + return x.ListValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_NumberValue)(nil), + (*Value_StringValue)(nil), + (*Value_BoolValue)(nil), + (*Value_StructValue)(nil), + (*Value_ListValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // kind + switch x := m.Kind.(type) { + case *Value_NullValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_NumberValue: + b.EncodeVarint(2<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.NumberValue)) + case *Value_StringValue: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_StructValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructValue); err != nil { + return err + } + case *Value_ListValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Value.Kind has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 1: // kind.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_NullValue{NullValue(x)} + return true, err + case 2: // kind.number_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Kind = &Value_NumberValue{math.Float64frombits(x)} + return true, err + case 3: // kind.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Kind = &Value_StringValue{x} + return true, err + case 4: // kind.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_BoolValue{x != 0} + return true, err + case 5: // kind.struct_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Struct) + err := b.DecodeMessage(msg) + m.Kind = &Value_StructValue{msg} + return true, err + case 6: // kind.list_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ListValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_ListValue{msg} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // kind + switch x := m.Kind.(type) { + case *Value_NullValue: + n += proto.SizeVarint(1<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_NumberValue: + n += proto.SizeVarint(2<<3 | proto.WireFixed64) + n += 8 + case *Value_StringValue: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BoolValue: + n += proto.SizeVarint(4<<3 | proto.WireVarint) + n += 1 + case *Value_StructValue: + s := proto.Size(x.StructValue) + n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) 
+ n += s + case *Value_ListValue: + s := proto.Size(x.ListValue) + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// `ListValue` is a wrapper around a repeated field of values. +// +// The JSON representation for `ListValue` is JSON array. +type ListValue struct { + // Repeated field of dynamically typed values. + Values []*Value `protobuf:"bytes,1,rep,name=values" json:"values,omitempty"` +} + +func (m *ListValue) Reset() { *m = ListValue{} } +func (m *ListValue) String() string { return proto.CompactTextString(m) } +func (*ListValue) ProtoMessage() {} +func (*ListValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (*ListValue) XXX_WellKnownType() string { return "ListValue" } + +func (m *ListValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +func init() { + proto.RegisterType((*Struct)(nil), "google.protobuf.Struct") + proto.RegisterType((*Value)(nil), "google.protobuf.Value") + proto.RegisterType((*ListValue)(nil), "google.protobuf.ListValue") + proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value) +} + +func init() { proto.RegisterFile("google/protobuf/struct.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 417 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0xc7, 0x3b, 0xc9, 0x36, 0x98, 0x17, 0x59, 0x97, 0x11, 0xb4, 0xac, 0xa2, 0xa1, 0x7b, 0x09, + 0x22, 0x29, 0xd6, 0x8b, 0x18, 0x2f, 0x06, 0xd6, 0x5d, 0x30, 0x2c, 0x31, 0xba, 0x15, 0xbc, 0x94, + 0x26, 0x4d, 0x63, 0xe8, 0x74, 0x26, 0x24, 0x33, 0x4a, 0x8f, 0x7e, 0x0b, 0xcf, 0x1e, 0x3d, 0xfa, + 0xe9, 0x3c, 0xca, 0xcc, 0x24, 0xa9, 0xb4, 0xf4, 0x94, 0xbc, 0xf7, 0x7e, 0xef, 0x3f, 0xef, 0xff, + 0x66, 0xe0, 0x71, 0xc1, 0x58, 0x41, 0xf2, 0x49, 0x55, 0x33, 0xce, 0x52, 0xb1, 0x9a, 0x34, 0xbc, + 0x16, 0x19, 0xf7, 0x55, 0x8c, 0xef, 0xe9, 0xaa, 0xdf, 0x55, 0xc7, 0x3f, 0x11, 0x58, 0x1f, 0x15, + 0x81, 0x03, 0xb0, 0x56, 0x65, 0x4e, 0x96, 0xcd, 0x08, 0xb9, 0xa6, 0xe7, 0x4c, 0x2f, 0xfc, 0x3d, + 0xd8, 0xd7, 0xa0, 0xff, 0x4e, 0x51, 0x97, 0x94, 0xd7, 0xdb, 0xa4, 0x6d, 0x39, 0xff, 0x00, 0xce, + 0x7f, 0x69, 0x7c, 0x06, 0xe6, 0x3a, 0xdf, 0x8e, 0x90, 0x8b, 0x3c, 0x3b, 0x91, 0xbf, 0xf8, 0x39, + 0x0c, 0xbf, 0x2d, 0x88, 0xc8, 0x47, 0x86, 0x8b, 0x3c, 0x67, 0xfa, 0xe0, 0x40, 0x7c, 0x26, 0xab, + 0x89, 0x86, 0x5e, 0x1b, 0xaf, 0xd0, 0xf8, 0x8f, 0x01, 0x43, 0x95, 0xc4, 0x01, 0x00, 0x15, 0x84, + 0xcc, 0xb5, 0x80, 0x14, 0x3d, 0x9d, 0x9e, 0x1f, 0x08, 0xdc, 0x08, 0x42, 0x14, 0x7f, 0x3d, 0x48, + 0x6c, 0xda, 0x05, 0xf8, 0x02, 0xee, 0x52, 0xb1, 0x49, 0xf3, 0x7a, 0xbe, 0x3b, 0x1f, 0x5d, 0x0f, + 0x12, 0x47, 0x67, 0x7b, 0xa8, 0xe1, 0x75, 0x49, 0x8b, 0x16, 0x32, 0xe5, 0xe0, 0x12, 0xd2, 0x59, + 0x0d, 0x3d, 0x05, 0x48, 0x19, 0xeb, 0xc6, 0x38, 0x71, 0x91, 0x77, 0x47, 0x1e, 0x25, 0x73, 0x1a, + 0x78, 0xa3, 0x54, 0x44, 0xc6, 0x5b, 0x64, 0xa8, 0xac, 0x3e, 0x3c, 0xb2, 0xc7, 0x56, 0x5e, 0x64, + 0xbc, 0x77, 0x49, 0xca, 0xa6, 0xeb, 0xb5, 0x54, 0xef, 0xa1, 0xcb, 0xa8, 0x6c, 0x78, 0xef, 0x92, + 0x74, 0x41, 0x68, 0xc1, 0xc9, 0xba, 0xa4, 0xcb, 0x71, 0x00, 0x76, 0x4f, 0x60, 0x1f, 0x2c, 0x25, + 0xd6, 0xdd, 0xe8, 0xb1, 0xa5, 0xb7, 0xd4, 0xb3, 0x47, 0x60, 0xf7, 0x4b, 0xc4, 0xa7, 0x00, 0x37, + 0xb7, 0x51, 0x34, 0x9f, 0xbd, 0x8d, 0x6e, 0x2f, 0xcf, 0x06, 0xe1, 0x0f, 0x04, 0xf7, 0x33, 0xb6, + 0xd9, 0x97, 0x08, 0x1d, 0xed, 0x26, 0x96, 0x71, 0x8c, 0xbe, 0xbc, 
0x28, 0x4a, 0xfe, 0x55, 0xa4,
+ 0x7e, 0xc6, 0x36, 0x93, 0x82, 0x91, 0x05, 0x2d, 0x76, 0x4f, 0xb1, 0xe2, 0xdb, 0x2a, 0x6f, 0xda,
+ 0x17, 0x19, 0xe8, 0x4f, 0x95, 0xfe, 0x45, 0xe8, 0x97, 0x61, 0x5e, 0xc5, 0xe1, 0x6f, 0xe3, 0xc9,
+ 0x95, 0x16, 0x8f, 0xbb, 0xf9, 0x3e, 0xe7, 0x84, 0xbc, 0xa7, 0xec, 0x3b, 0xfd, 0x24, 0x3b, 0x53,
+ 0x4b, 0x49, 0xbd, 0xfc, 0x17, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x1b, 0x59, 0xf8, 0xe5, 0x02, 0x00,
+ 0x00,
+}
diff --git a/vendor/github.com/golang/protobuf/ptypes/struct/struct.proto b/vendor/github.com/golang/protobuf/ptypes/struct/struct.proto
new file mode 100644
index 000000000..7d7808e7f
--- /dev/null
+++ b/vendor/github.com/golang/protobuf/ptypes/struct/struct.proto
@@ -0,0 +1,96 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+
+package google.protobuf;
+
+option csharp_namespace = "Google.Protobuf.WellKnownTypes";
+option cc_enable_arenas = true;
+option go_package = "github.com/golang/protobuf/ptypes/struct;structpb";
+option java_package = "com.google.protobuf";
+option java_outer_classname = "StructProto";
+option java_multiple_files = true;
+option objc_class_prefix = "GPB";
+
+
+// `Struct` represents a structured data value, consisting of fields
+// which map to dynamically typed values. In some languages, `Struct`
+// might be supported by a native representation. For example, in
+// scripting languages like JS a struct is represented as an
+// object. The details of that representation are described together
+// with the proto support for the language.
+//
+// The JSON representation for `Struct` is JSON object.
+message Struct {
+ // Unordered map of dynamically typed values.
+ map<string, Value> fields = 1;
+}
+
+// `Value` represents a dynamically typed value which can be either
+// null, a number, a string, a boolean, a recursive struct value, or a
+// list of values.
A producer of value is expected to set one of that +// variants, absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double number_value = 2; + // Represents a string value. + string string_value = 3; + // Represents a boolean value. + bool bool_value = 4; + // Represents a structured value. + Struct struct_value = 5; + // Represents a repeated `Value`. + ListValue list_value = 6; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. +// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp.go b/vendor/github.com/golang/protobuf/ptypes/timestamp.go new file mode 100644 index 000000000..47f10dbc2 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp.go @@ -0,0 +1,134 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package ptypes + +// This file implements operations on google.protobuf.Timestamp. + +import ( + "errors" + "fmt" + "time" + + tspb "github.com/golang/protobuf/ptypes/timestamp" +) + +const ( + // Seconds field of the earliest valid Timestamp. + // This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix(). + minValidSeconds = -62135596800 + // Seconds field just after the latest valid Timestamp. + // This is time.Date(10000, 1, 1, 0, 0, 0, 0, time.UTC).Unix(). 
+ maxValidSeconds = 253402300800 +) + +// validateTimestamp determines whether a Timestamp is valid. +// A valid timestamp represents a time in the range +// [0001-01-01, 10000-01-01) and has a Nanos field +// in the range [0, 1e9). +// +// If the Timestamp is valid, validateTimestamp returns nil. +// Otherwise, it returns an error that describes +// the problem. +// +// Every valid Timestamp can be represented by a time.Time, but the converse is not true. +func validateTimestamp(ts *tspb.Timestamp) error { + if ts == nil { + return errors.New("timestamp: nil Timestamp") + } + if ts.Seconds < minValidSeconds { + return fmt.Errorf("timestamp: %v before 0001-01-01", ts) + } + if ts.Seconds >= maxValidSeconds { + return fmt.Errorf("timestamp: %v after 10000-01-01", ts) + } + if ts.Nanos < 0 || ts.Nanos >= 1e9 { + return fmt.Errorf("timestamp: %v: nanos not in range [0, 1e9)", ts) + } + return nil +} + +// Timestamp converts a google.protobuf.Timestamp proto to a time.Time. +// It returns an error if the argument is invalid. +// +// Unlike most Go functions, if Timestamp returns an error, the first return value +// is not the zero time.Time. Instead, it is the value obtained from the +// time.Unix function when passed the contents of the Timestamp, in the UTC +// locale. This may or may not be a meaningful time; many invalid Timestamps +// do map to valid time.Times. +// +// A nil Timestamp returns an error. The first return value in that case is +// undefined. +func Timestamp(ts *tspb.Timestamp) (time.Time, error) { + // Don't return the zero value on error, because corresponds to a valid + // timestamp. Instead return whatever time.Unix gives us. + var t time.Time + if ts == nil { + t = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp + } else { + t = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC() + } + return t, validateTimestamp(ts) +} + +// TimestampNow returns a google.protobuf.Timestamp for the current time. +func TimestampNow() *tspb.Timestamp { + ts, err := TimestampProto(time.Now()) + if err != nil { + panic("ptypes: time.Now() out of Timestamp range") + } + return ts +} + +// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto. +// It returns an error if the resulting Timestamp is invalid. +func TimestampProto(t time.Time) (*tspb.Timestamp, error) { + seconds := t.Unix() + nanos := int32(t.Sub(time.Unix(seconds, 0))) + ts := &tspb.Timestamp{ + Seconds: seconds, + Nanos: nanos, + } + if err := validateTimestamp(ts); err != nil { + return nil, err + } + return ts, nil +} + +// TimestampString returns the RFC 3339 string for valid Timestamps. For invalid +// Timestamps, it returns an error message in parentheses. +func TimestampString(ts *tspb.Timestamp) string { + t, err := Timestamp(ts) + if err != nil { + return fmt.Sprintf("(%v)", err) + } + return t.Format(time.RFC3339Nano) +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go new file mode 100644 index 000000000..e23e4a25d --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go @@ -0,0 +1,160 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/timestamp.proto + +/* +Package timestamp is a generated protocol buffer package. 
+ +It is generated from these files: + google/protobuf/timestamp.proto + +It has these top-level messages: + Timestamp +*/ +package timestamp + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Timestamp represents a point in time independent of any time zone +// or calendar, represented as seconds and fractions of seconds at +// nanosecond resolution in UTC Epoch time. It is encoded using the +// Proleptic Gregorian Calendar which extends the Gregorian calendar +// backwards to year one. It is encoded assuming all minutes are 60 +// seconds long, i.e. leap seconds are "smeared" so that no leap second +// table is needed for interpretation. Range is from +// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. +// By restricting to that range, we ensure that we can convert to +// and from RFC 3339 date strings. +// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// +// Example 5: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required, though only UTC (as indicated by "Z") is presently supported. +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. 
+// +// In JavaScript, one can convert a Date object to this format using the +// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) +// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one +// can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) +// to obtain a formatter capable of generating timestamps in this format. +// +// +type Timestamp struct { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"` + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"` +} + +func (m *Timestamp) Reset() { *m = Timestamp{} } +func (m *Timestamp) String() string { return proto.CompactTextString(m) } +func (*Timestamp) ProtoMessage() {} +func (*Timestamp) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" } + +func (m *Timestamp) GetSeconds() int64 { + if m != nil { + return m.Seconds + } + return 0 +} + +func (m *Timestamp) GetNanos() int32 { + if m != nil { + return m.Nanos + } + return 0 +} + +func init() { + proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp") +} + +func init() { proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 191 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d, + 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28, + 0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5, + 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89, + 0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x1d, 0x97, 0x70, + 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0x99, 0x4e, 0x7c, 0x70, 0x13, 0x03, 0x40, 0x42, 0x01, 0x8c, 0x51, + 0xda, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0x39, 0x89, + 0x79, 0xe9, 0x08, 0x27, 0x16, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x23, 0x5c, 0xfa, 0x83, 0x91, 0x71, + 0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a, + 0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43, + 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x77, 0x4a, 0x07, 0xf7, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto new file mode 100644 index 000000000..b7cbd1750 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto @@ -0,0 +1,133 @@ +// Protocol Buffers - Google's data 
interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "github.com/golang/protobuf/ptypes/timestamp"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// A Timestamp represents a point in time independent of any time zone +// or calendar, represented as seconds and fractions of seconds at +// nanosecond resolution in UTC Epoch time. It is encoded using the +// Proleptic Gregorian Calendar which extends the Gregorian calendar +// backwards to year one. It is encoded assuming all minutes are 60 +// seconds long, i.e. leap seconds are "smeared" so that no leap second +// table is needed for interpretation. Range is from +// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. +// By restricting to that range, we ensure that we can convert to +// and from RFC 3339 date strings. +// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 
+// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// +// Example 5: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required, though only UTC (as indicated by "Z") is presently supported. +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) +// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one +// can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) +// to obtain a formatter capable of generating timestamps in this format. +// +// +message Timestamp { + + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + int32 nanos = 2; +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go b/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go new file mode 100644 index 000000000..6e3c969b9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp_test.go @@ -0,0 +1,153 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2016 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package ptypes + +import ( + "math" + "testing" + "time" + + "github.com/golang/protobuf/proto" + tspb "github.com/golang/protobuf/ptypes/timestamp" +) + +var tests = []struct { + ts *tspb.Timestamp + valid bool + t time.Time +}{ + // The timestamp representing the Unix epoch date. + {&tspb.Timestamp{Seconds: 0, Nanos: 0}, true, utcDate(1970, 1, 1)}, + // The smallest representable timestamp. + {&tspb.Timestamp{Seconds: math.MinInt64, Nanos: math.MinInt32}, false, + time.Unix(math.MinInt64, math.MinInt32).UTC()}, + // The smallest representable timestamp with non-negative nanos. + {&tspb.Timestamp{Seconds: math.MinInt64, Nanos: 0}, false, time.Unix(math.MinInt64, 0).UTC()}, + // The earliest valid timestamp. + {&tspb.Timestamp{Seconds: minValidSeconds, Nanos: 0}, true, utcDate(1, 1, 1)}, + //"0001-01-01T00:00:00Z"}, + // The largest representable timestamp. + {&tspb.Timestamp{Seconds: math.MaxInt64, Nanos: math.MaxInt32}, false, + time.Unix(math.MaxInt64, math.MaxInt32).UTC()}, + // The largest representable timestamp with nanos in range. + {&tspb.Timestamp{Seconds: math.MaxInt64, Nanos: 1e9 - 1}, false, + time.Unix(math.MaxInt64, 1e9-1).UTC()}, + // The largest valid timestamp. + {&tspb.Timestamp{Seconds: maxValidSeconds - 1, Nanos: 1e9 - 1}, true, + time.Date(9999, 12, 31, 23, 59, 59, 1e9-1, time.UTC)}, + // The smallest invalid timestamp that is larger than the valid range. + {&tspb.Timestamp{Seconds: maxValidSeconds, Nanos: 0}, false, time.Unix(maxValidSeconds, 0).UTC()}, + // A date before the epoch. + {&tspb.Timestamp{Seconds: -281836800, Nanos: 0}, true, utcDate(1961, 1, 26)}, + // A date after the epoch. + {&tspb.Timestamp{Seconds: 1296000000, Nanos: 0}, true, utcDate(2011, 1, 26)}, + // A date after the epoch, in the middle of the day. + {&tspb.Timestamp{Seconds: 1296012345, Nanos: 940483}, true, + time.Date(2011, 1, 26, 3, 25, 45, 940483, time.UTC)}, +} + +func TestValidateTimestamp(t *testing.T) { + for _, s := range tests { + got := validateTimestamp(s.ts) + if (got == nil) != s.valid { + t.Errorf("validateTimestamp(%v) = %v, want %v", s.ts, got, s.valid) + } + } +} + +func TestTimestamp(t *testing.T) { + for _, s := range tests { + got, err := Timestamp(s.ts) + if (err == nil) != s.valid { + t.Errorf("Timestamp(%v) error = %v, but valid = %t", s.ts, err, s.valid) + } else if s.valid && got != s.t { + t.Errorf("Timestamp(%v) = %v, want %v", s.ts, got, s.t) + } + } + // Special case: a nil Timestamp is an error, but returns the 0 Unix time. 
+ got, err := Timestamp(nil) + want := time.Unix(0, 0).UTC() + if got != want { + t.Errorf("Timestamp(nil) = %v, want %v", got, want) + } + if err == nil { + t.Errorf("Timestamp(nil) error = nil, expected error") + } +} + +func TestTimestampProto(t *testing.T) { + for _, s := range tests { + got, err := TimestampProto(s.t) + if (err == nil) != s.valid { + t.Errorf("TimestampProto(%v) error = %v, but valid = %t", s.t, err, s.valid) + } else if s.valid && !proto.Equal(got, s.ts) { + t.Errorf("TimestampProto(%v) = %v, want %v", s.t, got, s.ts) + } + } + // No corresponding special case here: no time.Time results in a nil Timestamp. +} + +func TestTimestampString(t *testing.T) { + for _, test := range []struct { + ts *tspb.Timestamp + want string + }{ + // Not much testing needed because presumably time.Format is + // well-tested. + {&tspb.Timestamp{Seconds: 0, Nanos: 0}, "1970-01-01T00:00:00Z"}, + {&tspb.Timestamp{Seconds: minValidSeconds - 1, Nanos: 0}, "(timestamp: seconds:-62135596801 before 0001-01-01)"}, + } { + got := TimestampString(test.ts) + if got != test.want { + t.Errorf("TimestampString(%v) = %q, want %q", test.ts, got, test.want) + } + } +} + +func utcDate(year, month, day int) time.Time { + return time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC) +} + +func TestTimestampNow(t *testing.T) { + // Bracket the expected time. + before := time.Now() + ts := TimestampNow() + after := time.Now() + + tm, err := Timestamp(ts) + if err != nil { + t.Errorf("between %v and %v\nTimestampNow() = %v\nwhich is invalid (%v)", before, after, ts, err) + } + if tm.Before(before) || tm.After(after) { + t.Errorf("between %v and %v\nTimestamp(TimestampNow()) = %v", before, after, tm) + } +} diff --git a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go new file mode 100644 index 000000000..0ed59bf19 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.pb.go @@ -0,0 +1,260 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/wrappers.proto + +/* +Package wrappers is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/wrappers.proto + +It has these top-level messages: + DoubleValue + FloatValue + Int64Value + UInt64Value + Int32Value + UInt32Value + BoolValue + StringValue + BytesValue +*/ +package wrappers + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +type DoubleValue struct { + // The double value. 
+ Value float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` +} + +func (m *DoubleValue) Reset() { *m = DoubleValue{} } +func (m *DoubleValue) String() string { return proto.CompactTextString(m) } +func (*DoubleValue) ProtoMessage() {} +func (*DoubleValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } +func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" } + +func (m *DoubleValue) GetValue() float64 { + if m != nil { + return m.Value + } + return 0 +} + +// Wrapper message for `float`. +// +// The JSON representation for `FloatValue` is JSON number. +type FloatValue struct { + // The float value. + Value float32 `protobuf:"fixed32,1,opt,name=value" json:"value,omitempty"` +} + +func (m *FloatValue) Reset() { *m = FloatValue{} } +func (m *FloatValue) String() string { return proto.CompactTextString(m) } +func (*FloatValue) ProtoMessage() {} +func (*FloatValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } +func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" } + +func (m *FloatValue) GetValue() float32 { + if m != nil { + return m.Value + } + return 0 +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +type Int64Value struct { + // The int64 value. + Value int64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` +} + +func (m *Int64Value) Reset() { *m = Int64Value{} } +func (m *Int64Value) String() string { return proto.CompactTextString(m) } +func (*Int64Value) ProtoMessage() {} +func (*Int64Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } +func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" } + +func (m *Int64Value) GetValue() int64 { + if m != nil { + return m.Value + } + return 0 +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. +type UInt64Value struct { + // The uint64 value. + Value uint64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` +} + +func (m *UInt64Value) Reset() { *m = UInt64Value{} } +func (m *UInt64Value) String() string { return proto.CompactTextString(m) } +func (*UInt64Value) ProtoMessage() {} +func (*UInt64Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } +func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" } + +func (m *UInt64Value) GetValue() uint64 { + if m != nil { + return m.Value + } + return 0 +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +type Int32Value struct { + // The int32 value. + Value int32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` +} + +func (m *Int32Value) Reset() { *m = Int32Value{} } +func (m *Int32Value) String() string { return proto.CompactTextString(m) } +func (*Int32Value) ProtoMessage() {} +func (*Int32Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } +func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" } + +func (m *Int32Value) GetValue() int32 { + if m != nil { + return m.Value + } + return 0 +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +type UInt32Value struct { + // The uint32 value. 
+ Value uint32 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` +} + +func (m *UInt32Value) Reset() { *m = UInt32Value{} } +func (m *UInt32Value) String() string { return proto.CompactTextString(m) } +func (*UInt32Value) ProtoMessage() {} +func (*UInt32Value) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } +func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" } + +func (m *UInt32Value) GetValue() uint32 { + if m != nil { + return m.Value + } + return 0 +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +type BoolValue struct { + // The bool value. + Value bool `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` +} + +func (m *BoolValue) Reset() { *m = BoolValue{} } +func (m *BoolValue) String() string { return proto.CompactTextString(m) } +func (*BoolValue) ProtoMessage() {} +func (*BoolValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } +func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" } + +func (m *BoolValue) GetValue() bool { + if m != nil { + return m.Value + } + return false +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +type StringValue struct { + // The string value. + Value string `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` +} + +func (m *StringValue) Reset() { *m = StringValue{} } +func (m *StringValue) String() string { return proto.CompactTextString(m) } +func (*StringValue) ProtoMessage() {} +func (*StringValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } +func (*StringValue) XXX_WellKnownType() string { return "StringValue" } + +func (m *StringValue) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. +type BytesValue struct { + // The bytes value. 
+ Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *BytesValue) Reset() { *m = BytesValue{} } +func (m *BytesValue) String() string { return proto.CompactTextString(m) } +func (*BytesValue) ProtoMessage() {} +func (*BytesValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } +func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" } + +func (m *BytesValue) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*DoubleValue)(nil), "google.protobuf.DoubleValue") + proto.RegisterType((*FloatValue)(nil), "google.protobuf.FloatValue") + proto.RegisterType((*Int64Value)(nil), "google.protobuf.Int64Value") + proto.RegisterType((*UInt64Value)(nil), "google.protobuf.UInt64Value") + proto.RegisterType((*Int32Value)(nil), "google.protobuf.Int32Value") + proto.RegisterType((*UInt32Value)(nil), "google.protobuf.UInt32Value") + proto.RegisterType((*BoolValue)(nil), "google.protobuf.BoolValue") + proto.RegisterType((*StringValue)(nil), "google.protobuf.StringValue") + proto.RegisterType((*BytesValue)(nil), "google.protobuf.BytesValue") +} + +func init() { proto.RegisterFile("google/protobuf/wrappers.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 259 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x2f, 0x4a, 0x2c, + 0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0xca, + 0x5c, 0xdc, 0x2e, 0xf9, 0xa5, 0x49, 0x39, 0xa9, 0x61, 0x89, 0x39, 0xa5, 0xa9, 0x42, 0x22, 0x5c, + 0xac, 0x65, 0x20, 0x86, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x63, 0x10, 0x84, 0xa3, 0xa4, 0xc4, 0xc5, + 0xe5, 0x96, 0x93, 0x9f, 0x58, 0x82, 0x45, 0x0d, 0x13, 0x92, 0x1a, 0xcf, 0xbc, 0x12, 0x33, 0x13, + 0x2c, 0x6a, 0x98, 0x61, 0x6a, 0x94, 0xb9, 0xb8, 0x43, 0x71, 0x29, 0x62, 0x41, 0x35, 0xc8, 0xd8, + 0x08, 0x8b, 0x1a, 0x56, 0x34, 0x83, 0xb0, 0x2a, 0xe2, 0x85, 0x29, 0x52, 0xe4, 0xe2, 0x74, 0xca, + 0xcf, 0xcf, 0xc1, 0xa2, 0x84, 0x03, 0xc9, 0x9c, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0x74, 0x2c, 0x8a, + 0x38, 0x91, 0x1c, 0xe4, 0x54, 0x59, 0x92, 0x5a, 0x8c, 0x45, 0x0d, 0x0f, 0x54, 0x8d, 0x53, 0x0d, + 0x97, 0x70, 0x72, 0x7e, 0xae, 0x1e, 0x5a, 0xe8, 0x3a, 0xf1, 0x86, 0x43, 0x83, 0x3f, 0x00, 0x24, + 0x12, 0xc0, 0x18, 0xa5, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x9f, + 0x9e, 0x9f, 0x93, 0x98, 0x97, 0x8e, 0x88, 0xaa, 0x82, 0x92, 0xca, 0x82, 0xd4, 0x62, 0x78, 0x8c, + 0xfd, 0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, + 0x00, 0x54, 0xa9, 0x5e, 0x78, 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, + 0x12, 0x1b, 0xd8, 0x0c, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x19, 0x6c, 0xb9, 0xb8, 0xfe, + 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.proto b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.proto new file mode 100644 index 000000000..01947639a --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/wrappers/wrappers.proto @@ -0,0 +1,118 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Wrappers for primitive (non-message) types. These types are useful +// for embedding primitives in the `google.protobuf.Any` type and for places +// where we need to distinguish between the absence of a primitive +// typed field and its default value. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "github.com/golang/protobuf/ptypes/wrappers"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "WrappersProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +message DoubleValue { + // The double value. + double value = 1; +} + +// Wrapper message for `float`. +// +// The JSON representation for `FloatValue` is JSON number. +message FloatValue { + // The float value. + float value = 1; +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +message Int64Value { + // The int64 value. + int64 value = 1; +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. +message UInt64Value { + // The uint64 value. + uint64 value = 1; +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +message Int32Value { + // The int32 value. + int32 value = 1; +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +message UInt32Value { + // The uint32 value. + uint32 value = 1; +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +message BoolValue { + // The bool value. + bool value = 1; +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +message StringValue { + // The string value. 
+ string value = 1; +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. +message BytesValue { + // The bytes value. + bytes value = 1; +} diff --git a/vendor/github.com/google/btree/.travis.yml b/vendor/github.com/google/btree/.travis.yml new file mode 100644 index 000000000..4f2ee4d97 --- /dev/null +++ b/vendor/github.com/google/btree/.travis.yml @@ -0,0 +1 @@ +language: go diff --git a/vendor/github.com/google/btree/LICENSE b/vendor/github.com/google/btree/LICENSE new file mode 100644 index 000000000..d64569567 --- /dev/null +++ b/vendor/github.com/google/btree/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/google/btree/README.md b/vendor/github.com/google/btree/README.md new file mode 100644 index 000000000..6062a4dac --- /dev/null +++ b/vendor/github.com/google/btree/README.md @@ -0,0 +1,12 @@ +# BTree implementation for Go + +![Travis CI Build Status](https://api.travis-ci.org/google/btree.svg?branch=master) + +This package provides an in-memory B-Tree implementation for Go, useful as +an ordered, mutable data structure. + +The API is based off of the wonderful +http://godoc.org/github.com/petar/GoLLRB/llrb, and is meant to allow btree to +act as a drop-in replacement for gollrb trees. + +See http://godoc.org/github.com/google/btree for documentation. diff --git a/vendor/github.com/google/btree/btree.go b/vendor/github.com/google/btree/btree.go new file mode 100644 index 000000000..eb74b1d39 --- /dev/null +++ b/vendor/github.com/google/btree/btree.go @@ -0,0 +1,821 @@ +// Copyright 2014 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package btree implements in-memory B-Trees of arbitrary degree. +// +// btree implements an in-memory B-Tree for use as an ordered data structure. +// It is not meant for persistent storage solutions. +// +// It has a flatter structure than an equivalent red-black or other binary tree, +// which in some cases yields better memory usage and/or performance. +// See some discussion on the matter here: +// http://google-opensource.blogspot.com/2013/01/c-containers-that-save-memory-and-time.html +// Note, though, that this project is in no way related to the C++ B-Tree +// implementation written about there. +// +// Within this tree, each node contains a slice of items and a (possibly nil) +// slice of children. 
For basic numeric values or raw structs, this can cause
+// efficiency differences when compared to equivalent C++ template code that
+// stores values in arrays within the node:
+// * Due to the overhead of storing values as interfaces (each
+// value needs to be stored as the value itself, then 2 words for the
+// interface pointing to that value and its type), resulting in higher
+// memory use.
+// * Since interfaces can point to values anywhere in memory, values are
+// most likely not stored in contiguous blocks, resulting in a higher
+// number of cache misses.
+// These issues don't tend to matter, though, when working with strings or other
+// heap-allocated structures, since C++-equivalent structures also must store
+// pointers and also distribute their values across the heap.
+//
+// This implementation is designed to be a drop-in replacement to gollrb.LLRB
+// trees, (http://github.com/petar/gollrb), an excellent and probably the most
+// widely used ordered tree implementation in the Go ecosystem currently.
+// Its functions, therefore, exactly mirror those of
+// llrb.LLRB where possible. Unlike gollrb, though, we currently don't
+// support storing multiple equivalent values.
+package btree
+
+import (
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+ "sync"
+)
+
+// Item represents a single object in the tree.
+type Item interface {
+ // Less tests whether the current item is less than the given argument.
+ //
+ // This must provide a strict weak ordering.
+ // If !a.Less(b) && !b.Less(a), we treat this to mean a == b (i.e. we can only
+ // hold one of either a or b in the tree).
+ Less(than Item) bool
+}
+
+const (
+ DefaultFreeListSize = 32
+)
+
+var (
+ nilItems = make(items, 16)
+ nilChildren = make(children, 16)
+)
+
+// FreeList represents a free list of btree nodes. By default each
+// BTree has its own FreeList, but multiple BTrees can share the same
+// FreeList.
+// Two Btrees using the same freelist are safe for concurrent write access.
+type FreeList struct {
+ mu sync.Mutex
+ freelist []*node
+}
+
+// NewFreeList creates a new free list.
+// size is the maximum size of the returned free list.
+func NewFreeList(size int) *FreeList {
+ return &FreeList{freelist: make([]*node, 0, size)}
+}
+
+func (f *FreeList) newNode() (n *node) {
+ f.mu.Lock()
+ index := len(f.freelist) - 1
+ if index < 0 {
+ f.mu.Unlock()
+ return new(node)
+ }
+ n = f.freelist[index]
+ f.freelist[index] = nil
+ f.freelist = f.freelist[:index]
+ f.mu.Unlock()
+ return
+}
+
+func (f *FreeList) freeNode(n *node) {
+ f.mu.Lock()
+ if len(f.freelist) < cap(f.freelist) {
+ f.freelist = append(f.freelist, n)
+ }
+ f.mu.Unlock()
+}
+
+// ItemIterator allows callers of Ascend* to iterate in-order over portions of
+// the tree. When this function returns false, iteration will stop and the
+// associated Ascend* function will immediately return.
+type ItemIterator func(i Item) bool
+
+// New creates a new B-Tree with the given degree.
+//
+// New(2), for example, will create a 2-3-4 tree (each node contains 1-3 items
+// and 2-4 children).
+func New(degree int) *BTree {
+ return NewWithFreeList(degree, NewFreeList(DefaultFreeListSize))
+}
+
+// NewWithFreeList creates a new B-Tree that uses the given node free list.
+func NewWithFreeList(degree int, f *FreeList) *BTree {
+ if degree <= 1 {
+ panic("bad degree")
+ }
+ return &BTree{
+ degree: degree,
+ cow: &copyOnWriteContext{freelist: f},
+ }
+}
+
+// items stores items in a node.
+type items []Item + +// insertAt inserts a value into the given index, pushing all subsequent values +// forward. +func (s *items) insertAt(index int, item Item) { + *s = append(*s, nil) + if index < len(*s) { + copy((*s)[index+1:], (*s)[index:]) + } + (*s)[index] = item +} + +// removeAt removes a value at a given index, pulling all subsequent values +// back. +func (s *items) removeAt(index int) Item { + item := (*s)[index] + copy((*s)[index:], (*s)[index+1:]) + (*s)[len(*s)-1] = nil + *s = (*s)[:len(*s)-1] + return item +} + +// pop removes and returns the last element in the list. +func (s *items) pop() (out Item) { + index := len(*s) - 1 + out = (*s)[index] + (*s)[index] = nil + *s = (*s)[:index] + return +} + +// truncate truncates this instance at index so that it contains only the +// first index items. index must be less than or equal to length. +func (s *items) truncate(index int) { + var toClear items + *s, toClear = (*s)[:index], (*s)[index:] + for len(toClear) > 0 { + toClear = toClear[copy(toClear, nilItems):] + } +} + +// find returns the index where the given item should be inserted into this +// list. 'found' is true if the item already exists in the list at the given +// index. +func (s items) find(item Item) (index int, found bool) { + i := sort.Search(len(s), func(i int) bool { + return item.Less(s[i]) + }) + if i > 0 && !s[i-1].Less(item) { + return i - 1, true + } + return i, false +} + +// children stores child nodes in a node. +type children []*node + +// insertAt inserts a value into the given index, pushing all subsequent values +// forward. +func (s *children) insertAt(index int, n *node) { + *s = append(*s, nil) + if index < len(*s) { + copy((*s)[index+1:], (*s)[index:]) + } + (*s)[index] = n +} + +// removeAt removes a value at a given index, pulling all subsequent values +// back. +func (s *children) removeAt(index int) *node { + n := (*s)[index] + copy((*s)[index:], (*s)[index+1:]) + (*s)[len(*s)-1] = nil + *s = (*s)[:len(*s)-1] + return n +} + +// pop removes and returns the last element in the list. +func (s *children) pop() (out *node) { + index := len(*s) - 1 + out = (*s)[index] + (*s)[index] = nil + *s = (*s)[:index] + return +} + +// truncate truncates this instance at index so that it contains only the +// first index children. index must be less than or equal to length. +func (s *children) truncate(index int) { + var toClear children + *s, toClear = (*s)[:index], (*s)[index:] + for len(toClear) > 0 { + toClear = toClear[copy(toClear, nilChildren):] + } +} + +// node is an internal node in a tree. +// +// It must at all times maintain the invariant that either +// * len(children) == 0, len(items) unconstrained +// * len(children) == len(items) + 1 +type node struct { + items items + children children + cow *copyOnWriteContext +} + +func (n *node) mutableFor(cow *copyOnWriteContext) *node { + if n.cow == cow { + return n + } + out := cow.newNode() + if cap(out.items) >= len(n.items) { + out.items = out.items[:len(n.items)] + } else { + out.items = make(items, len(n.items), cap(n.items)) + } + copy(out.items, n.items) + // Copy children + if cap(out.children) >= len(n.children) { + out.children = out.children[:len(n.children)] + } else { + out.children = make(children, len(n.children), cap(n.children)) + } + copy(out.children, n.children) + return out +} + +func (n *node) mutableChild(i int) *node { + c := n.children[i].mutableFor(n.cow) + n.children[i] = c + return c +} + +// split splits the given node at the given index. 
The current node shrinks, +// and this function returns the item that existed at that index and a new node +// containing all items/children after it. +func (n *node) split(i int) (Item, *node) { + item := n.items[i] + next := n.cow.newNode() + next.items = append(next.items, n.items[i+1:]...) + n.items.truncate(i) + if len(n.children) > 0 { + next.children = append(next.children, n.children[i+1:]...) + n.children.truncate(i + 1) + } + return item, next +} + +// maybeSplitChild checks if a child should be split, and if so splits it. +// Returns whether or not a split occurred. +func (n *node) maybeSplitChild(i, maxItems int) bool { + if len(n.children[i].items) < maxItems { + return false + } + first := n.mutableChild(i) + item, second := first.split(maxItems / 2) + n.items.insertAt(i, item) + n.children.insertAt(i+1, second) + return true +} + +// insert inserts an item into the subtree rooted at this node, making sure +// no nodes in the subtree exceed maxItems items. Should an equivalent item be +// be found/replaced by insert, it will be returned. +func (n *node) insert(item Item, maxItems int) Item { + i, found := n.items.find(item) + if found { + out := n.items[i] + n.items[i] = item + return out + } + if len(n.children) == 0 { + n.items.insertAt(i, item) + return nil + } + if n.maybeSplitChild(i, maxItems) { + inTree := n.items[i] + switch { + case item.Less(inTree): + // no change, we want first split node + case inTree.Less(item): + i++ // we want second split node + default: + out := n.items[i] + n.items[i] = item + return out + } + } + return n.mutableChild(i).insert(item, maxItems) +} + +// get finds the given key in the subtree and returns it. +func (n *node) get(key Item) Item { + i, found := n.items.find(key) + if found { + return n.items[i] + } else if len(n.children) > 0 { + return n.children[i].get(key) + } + return nil +} + +// min returns the first item in the subtree. +func min(n *node) Item { + if n == nil { + return nil + } + for len(n.children) > 0 { + n = n.children[0] + } + if len(n.items) == 0 { + return nil + } + return n.items[0] +} + +// max returns the last item in the subtree. +func max(n *node) Item { + if n == nil { + return nil + } + for len(n.children) > 0 { + n = n.children[len(n.children)-1] + } + if len(n.items) == 0 { + return nil + } + return n.items[len(n.items)-1] +} + +// toRemove details what item to remove in a node.remove call. +type toRemove int + +const ( + removeItem toRemove = iota // removes the given item + removeMin // removes smallest item in the subtree + removeMax // removes largest item in the subtree +) + +// remove removes an item from the subtree rooted at this node. +func (n *node) remove(item Item, minItems int, typ toRemove) Item { + var i int + var found bool + switch typ { + case removeMax: + if len(n.children) == 0 { + return n.items.pop() + } + i = len(n.items) + case removeMin: + if len(n.children) == 0 { + return n.items.removeAt(0) + } + i = 0 + case removeItem: + i, found = n.items.find(item) + if len(n.children) == 0 { + if found { + return n.items.removeAt(i) + } + return nil + } + default: + panic("invalid type") + } + // If we get to here, we have children. + if len(n.children[i].items) <= minItems { + return n.growChildAndRemove(i, item, minItems, typ) + } + child := n.mutableChild(i) + // Either we had enough items to begin with, or we've done some + // merging/stealing, because we've got enough now and we're ready to return + // stuff. 
+ if found { + // The item exists at index 'i', and the child we've selected can give us a + // predecessor, since if we've gotten here it's got > minItems items in it. + out := n.items[i] + // We use our special-case 'remove' call with typ=maxItem to pull the + // predecessor of item i (the rightmost leaf of our immediate left child) + // and set it into where we pulled the item from. + n.items[i] = child.remove(nil, minItems, removeMax) + return out + } + // Final recursive call. Once we're here, we know that the item isn't in this + // node and that the child is big enough to remove from. + return child.remove(item, minItems, typ) +} + +// growChildAndRemove grows child 'i' to make sure it's possible to remove an +// item from it while keeping it at minItems, then calls remove to actually +// remove it. +// +// Most documentation says we have to do two sets of special casing: +// 1) item is in this node +// 2) item is in child +// In both cases, we need to handle the two subcases: +// A) node has enough values that it can spare one +// B) node doesn't have enough values +// For the latter, we have to check: +// a) left sibling has node to spare +// b) right sibling has node to spare +// c) we must merge +// To simplify our code here, we handle cases #1 and #2 the same: +// If a node doesn't have enough items, we make sure it does (using a,b,c). +// We then simply redo our remove call, and the second time (regardless of +// whether we're in case 1 or 2), we'll have enough items and can guarantee +// that we hit case A. +func (n *node) growChildAndRemove(i int, item Item, minItems int, typ toRemove) Item { + if i > 0 && len(n.children[i-1].items) > minItems { + // Steal from left child + child := n.mutableChild(i) + stealFrom := n.mutableChild(i - 1) + stolenItem := stealFrom.items.pop() + child.items.insertAt(0, n.items[i-1]) + n.items[i-1] = stolenItem + if len(stealFrom.children) > 0 { + child.children.insertAt(0, stealFrom.children.pop()) + } + } else if i < len(n.items) && len(n.children[i+1].items) > minItems { + // steal from right child + child := n.mutableChild(i) + stealFrom := n.mutableChild(i + 1) + stolenItem := stealFrom.items.removeAt(0) + child.items = append(child.items, n.items[i]) + n.items[i] = stolenItem + if len(stealFrom.children) > 0 { + child.children = append(child.children, stealFrom.children.removeAt(0)) + } + } else { + if i >= len(n.items) { + i-- + } + child := n.mutableChild(i) + // merge with right child + mergeItem := n.items.removeAt(i) + mergeChild := n.children.removeAt(i + 1) + child.items = append(child.items, mergeItem) + child.items = append(child.items, mergeChild.items...) + child.children = append(child.children, mergeChild.children...) + n.cow.freeNode(mergeChild) + } + return n.remove(item, minItems, typ) +} + +type direction int + +const ( + descend = direction(-1) + ascend = direction(+1) +) + +// iterate provides a simple method for iterating over elements in the tree. +// +// When ascending, the 'start' should be less than 'stop' and when descending, +// the 'start' should be greater than 'stop'. Setting 'includeStart' to true +// will force the iterator to include the first item when it equals 'start', +// thus creating a "greaterOrEqual" or "lessThanEqual" rather than just a +// "greaterThan" or "lessThan" queries. 
+func (n *node) iterate(dir direction, start, stop Item, includeStart bool, hit bool, iter ItemIterator) (bool, bool) { + var ok bool + switch dir { + case ascend: + for i := 0; i < len(n.items); i++ { + if start != nil && n.items[i].Less(start) { + continue + } + if len(n.children) > 0 { + if hit, ok = n.children[i].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + if !includeStart && !hit && start != nil && !start.Less(n.items[i]) { + hit = true + continue + } + hit = true + if stop != nil && !n.items[i].Less(stop) { + return hit, false + } + if !iter(n.items[i]) { + return hit, false + } + } + if len(n.children) > 0 { + if hit, ok = n.children[len(n.children)-1].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + case descend: + for i := len(n.items) - 1; i >= 0; i-- { + if start != nil && !n.items[i].Less(start) { + if !includeStart || hit || start.Less(n.items[i]) { + continue + } + } + if len(n.children) > 0 { + if hit, ok = n.children[i+1].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + if stop != nil && !stop.Less(n.items[i]) { + return hit, false // continue + } + hit = true + if !iter(n.items[i]) { + return hit, false + } + } + if len(n.children) > 0 { + if hit, ok = n.children[0].iterate(dir, start, stop, includeStart, hit, iter); !ok { + return hit, false + } + } + } + return hit, true +} + +// Used for testing/debugging purposes. +func (n *node) print(w io.Writer, level int) { + fmt.Fprintf(w, "%sNODE:%v\n", strings.Repeat(" ", level), n.items) + for _, c := range n.children { + c.print(w, level+1) + } +} + +// BTree is an implementation of a B-Tree. +// +// BTree stores Item instances in an ordered structure, allowing easy insertion, +// removal, and iteration. +// +// Write operations are not safe for concurrent mutation by multiple +// goroutines, but Read operations are. +type BTree struct { + degree int + length int + root *node + cow *copyOnWriteContext +} + +// copyOnWriteContext pointers determine node ownership... a tree with a write +// context equivalent to a node's write context is allowed to modify that node. +// A tree whose write context does not match a node's is not allowed to modify +// it, and must create a new, writable copy (IE: it's a Clone). +// +// When doing any write operation, we maintain the invariant that the current +// node's context is equal to the context of the tree that requested the write. +// We do this by, before we descend into any node, creating a copy with the +// correct context if the contexts don't match. +// +// Since the node we're currently visiting on any write has the requesting +// tree's context, that node is modifiable in place. Children of that node may +// not share context, but before we descend into them, we'll make a mutable +// copy. +type copyOnWriteContext struct { + freelist *FreeList +} + +// Clone clones the btree, lazily. Clone should not be called concurrently, +// but the original tree (t) and the new tree (t2) can be used concurrently +// once the Clone call completes. +// +// The internal tree structure of b is marked read-only and shared between t and +// t2. Writes to both t and t2 use copy-on-write logic, creating new nodes +// whenever one of b's original nodes would have been modified. Read operations +// should have no performance degredation. 
Write operations for both t and t2 +// will initially experience minor slow-downs caused by additional allocs and +// copies due to the aforementioned copy-on-write logic, but should converge to +// the original performance characteristics of the original tree. +func (t *BTree) Clone() (t2 *BTree) { + // Create two entirely new copy-on-write contexts. + // This operation effectively creates three trees: + // the original, shared nodes (old b.cow) + // the new b.cow nodes + // the new out.cow nodes + cow1, cow2 := *t.cow, *t.cow + out := *t + t.cow = &cow1 + out.cow = &cow2 + return &out +} + +// maxItems returns the max number of items to allow per node. +func (t *BTree) maxItems() int { + return t.degree*2 - 1 +} + +// minItems returns the min number of items to allow per node (ignored for the +// root node). +func (t *BTree) minItems() int { + return t.degree - 1 +} + +func (c *copyOnWriteContext) newNode() (n *node) { + n = c.freelist.newNode() + n.cow = c + return +} + +func (c *copyOnWriteContext) freeNode(n *node) { + if n.cow == c { + // clear to allow GC + n.items.truncate(0) + n.children.truncate(0) + n.cow = nil + c.freelist.freeNode(n) + } +} + +// ReplaceOrInsert adds the given item to the tree. If an item in the tree +// already equals the given one, it is removed from the tree and returned. +// Otherwise, nil is returned. +// +// nil cannot be added to the tree (will panic). +func (t *BTree) ReplaceOrInsert(item Item) Item { + if item == nil { + panic("nil item being added to BTree") + } + if t.root == nil { + t.root = t.cow.newNode() + t.root.items = append(t.root.items, item) + t.length++ + return nil + } else { + t.root = t.root.mutableFor(t.cow) + if len(t.root.items) >= t.maxItems() { + item2, second := t.root.split(t.maxItems() / 2) + oldroot := t.root + t.root = t.cow.newNode() + t.root.items = append(t.root.items, item2) + t.root.children = append(t.root.children, oldroot, second) + } + } + out := t.root.insert(item, t.maxItems()) + if out == nil { + t.length++ + } + return out +} + +// Delete removes an item equal to the passed in item from the tree, returning +// it. If no such item exists, returns nil. +func (t *BTree) Delete(item Item) Item { + return t.deleteItem(item, removeItem) +} + +// DeleteMin removes the smallest item in the tree and returns it. +// If no such item exists, returns nil. +func (t *BTree) DeleteMin() Item { + return t.deleteItem(nil, removeMin) +} + +// DeleteMax removes the largest item in the tree and returns it. +// If no such item exists, returns nil. +func (t *BTree) DeleteMax() Item { + return t.deleteItem(nil, removeMax) +} + +func (t *BTree) deleteItem(item Item, typ toRemove) Item { + if t.root == nil || len(t.root.items) == 0 { + return nil + } + t.root = t.root.mutableFor(t.cow) + out := t.root.remove(item, t.minItems(), typ) + if len(t.root.items) == 0 && len(t.root.children) > 0 { + oldroot := t.root + t.root = t.root.children[0] + t.cow.freeNode(oldroot) + } + if out != nil { + t.length-- + } + return out +} + +// AscendRange calls the iterator for every value in the tree within the range +// [greaterOrEqual, lessThan), until iterator returns false. +func (t *BTree) AscendRange(greaterOrEqual, lessThan Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, greaterOrEqual, lessThan, true, false, iterator) +} + +// AscendLessThan calls the iterator for every value in the tree within the range +// [first, pivot), until iterator returns false. 
+func (t *BTree) AscendLessThan(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, nil, pivot, false, false, iterator) +} + +// AscendGreaterOrEqual calls the iterator for every value in the tree within +// the range [pivot, last], until iterator returns false. +func (t *BTree) AscendGreaterOrEqual(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, pivot, nil, true, false, iterator) +} + +// Ascend calls the iterator for every value in the tree within the range +// [first, last], until iterator returns false. +func (t *BTree) Ascend(iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(ascend, nil, nil, false, false, iterator) +} + +// DescendRange calls the iterator for every value in the tree within the range +// [lessOrEqual, greaterThan), until iterator returns false. +func (t *BTree) DescendRange(lessOrEqual, greaterThan Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, lessOrEqual, greaterThan, true, false, iterator) +} + +// DescendLessOrEqual calls the iterator for every value in the tree within the range +// [pivot, first], until iterator returns false. +func (t *BTree) DescendLessOrEqual(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, pivot, nil, true, false, iterator) +} + +// DescendGreaterThan calls the iterator for every value in the tree within +// the range (pivot, last], until iterator returns false. +func (t *BTree) DescendGreaterThan(pivot Item, iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, nil, pivot, false, false, iterator) +} + +// Descend calls the iterator for every value in the tree within the range +// [last, first], until iterator returns false. +func (t *BTree) Descend(iterator ItemIterator) { + if t.root == nil { + return + } + t.root.iterate(descend, nil, nil, false, false, iterator) +} + +// Get looks for the key item in the tree, returning it. It returns nil if +// unable to find that item. +func (t *BTree) Get(key Item) Item { + if t.root == nil { + return nil + } + return t.root.get(key) +} + +// Min returns the smallest item in the tree, or nil if the tree is empty. +func (t *BTree) Min() Item { + return min(t.root) +} + +// Max returns the largest item in the tree, or nil if the tree is empty. +func (t *BTree) Max() Item { + return max(t.root) +} + +// Has returns true if the given key is in the tree. +func (t *BTree) Has(key Item) bool { + return t.Get(key) != nil +} + +// Len returns the number of items currently in the tree. +func (t *BTree) Len() int { + return t.length +} + +// Int implements the Item interface for integers. +type Int int + +// Less returns true if int(a) < int(b). +func (a Int) Less(b Item) bool { + return a < b.(Int) +} diff --git a/vendor/github.com/google/btree/btree_mem.go b/vendor/github.com/google/btree/btree_mem.go new file mode 100644 index 000000000..cb95b7fa1 --- /dev/null +++ b/vendor/github.com/google/btree/btree_mem.go @@ -0,0 +1,76 @@ +// Copyright 2014 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build ignore + +// This binary compares memory usage between btree and gollrb. +package main + +import ( + "flag" + "fmt" + "math/rand" + "runtime" + "time" + + "github.com/google/btree" + "github.com/petar/GoLLRB/llrb" +) + +var ( + size = flag.Int("size", 1000000, "size of the tree to build") + degree = flag.Int("degree", 8, "degree of btree") + gollrb = flag.Bool("llrb", false, "use llrb instead of btree") +) + +func main() { + flag.Parse() + vals := rand.Perm(*size) + var t, v interface{} + v = vals + var stats runtime.MemStats + for i := 0; i < 10; i++ { + runtime.GC() + } + fmt.Println("-------- BEFORE ----------") + runtime.ReadMemStats(&stats) + fmt.Printf("%+v\n", stats) + start := time.Now() + if *gollrb { + tr := llrb.New() + for _, v := range vals { + tr.ReplaceOrInsert(llrb.Int(v)) + } + t = tr // keep it around + } else { + tr := btree.New(*degree) + for _, v := range vals { + tr.ReplaceOrInsert(btree.Int(v)) + } + t = tr // keep it around + } + fmt.Printf("%v inserts in %v\n", *size, time.Since(start)) + fmt.Println("-------- AFTER ----------") + runtime.ReadMemStats(&stats) + fmt.Printf("%+v\n", stats) + for i := 0; i < 10; i++ { + runtime.GC() + } + fmt.Println("-------- AFTER GC ----------") + runtime.ReadMemStats(&stats) + fmt.Printf("%+v\n", stats) + if t == v { + fmt.Println("to make sure vals and tree aren't GC'd") + } +} diff --git a/vendor/github.com/google/btree/btree_test.go b/vendor/github.com/google/btree/btree_test.go new file mode 100644 index 000000000..5da9d8b69 --- /dev/null +++ b/vendor/github.com/google/btree/btree_test.go @@ -0,0 +1,689 @@ +// Copyright 2014 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package btree + +import ( + "flag" + "fmt" + "math/rand" + "reflect" + "sort" + "sync" + "testing" + "time" +) + +func init() { + seed := time.Now().Unix() + fmt.Println(seed) + rand.Seed(seed) +} + +// perm returns a random permutation of n Int items in the range [0, n). +func perm(n int) (out []Item) { + for _, v := range rand.Perm(n) { + out = append(out, Int(v)) + } + return +} + +// rang returns an ordered list of Int items in the range [0, n). +func rang(n int) (out []Item) { + for i := 0; i < n; i++ { + out = append(out, Int(i)) + } + return +} + +// all extracts all items from a tree in order as a slice. +func all(t *BTree) (out []Item) { + t.Ascend(func(a Item) bool { + out = append(out, a) + return true + }) + return +} + +// rangerev returns a reversed ordered list of Int items in the range [0, n). 
+func rangrev(n int) (out []Item) { + for i := n - 1; i >= 0; i-- { + out = append(out, Int(i)) + } + return +} + +// allrev extracts all items from a tree in reverse order as a slice. +func allrev(t *BTree) (out []Item) { + t.Descend(func(a Item) bool { + out = append(out, a) + return true + }) + return +} + +var btreeDegree = flag.Int("degree", 32, "B-Tree degree") + +func TestBTree(t *testing.T) { + tr := New(*btreeDegree) + const treeSize = 10000 + for i := 0; i < 10; i++ { + if min := tr.Min(); min != nil { + t.Fatalf("empty min, got %+v", min) + } + if max := tr.Max(); max != nil { + t.Fatalf("empty max, got %+v", max) + } + for _, item := range perm(treeSize) { + if x := tr.ReplaceOrInsert(item); x != nil { + t.Fatal("insert found item", item) + } + } + for _, item := range perm(treeSize) { + if x := tr.ReplaceOrInsert(item); x == nil { + t.Fatal("insert didn't find item", item) + } + } + if min, want := tr.Min(), Item(Int(0)); min != want { + t.Fatalf("min: want %+v, got %+v", want, min) + } + if max, want := tr.Max(), Item(Int(treeSize-1)); max != want { + t.Fatalf("max: want %+v, got %+v", want, max) + } + got := all(tr) + want := rang(treeSize) + if !reflect.DeepEqual(got, want) { + t.Fatalf("mismatch:\n got: %v\nwant: %v", got, want) + } + + gotrev := allrev(tr) + wantrev := rangrev(treeSize) + if !reflect.DeepEqual(gotrev, wantrev) { + t.Fatalf("mismatch:\n got: %v\nwant: %v", got, want) + } + + for _, item := range perm(treeSize) { + if x := tr.Delete(item); x == nil { + t.Fatalf("didn't find %v", item) + } + } + if got = all(tr); len(got) > 0 { + t.Fatalf("some left!: %v", got) + } + } +} + +func ExampleBTree() { + tr := New(*btreeDegree) + for i := Int(0); i < 10; i++ { + tr.ReplaceOrInsert(i) + } + fmt.Println("len: ", tr.Len()) + fmt.Println("get3: ", tr.Get(Int(3))) + fmt.Println("get100: ", tr.Get(Int(100))) + fmt.Println("del4: ", tr.Delete(Int(4))) + fmt.Println("del100: ", tr.Delete(Int(100))) + fmt.Println("replace5: ", tr.ReplaceOrInsert(Int(5))) + fmt.Println("replace100:", tr.ReplaceOrInsert(Int(100))) + fmt.Println("min: ", tr.Min()) + fmt.Println("delmin: ", tr.DeleteMin()) + fmt.Println("max: ", tr.Max()) + fmt.Println("delmax: ", tr.DeleteMax()) + fmt.Println("len: ", tr.Len()) + // Output: + // len: 10 + // get3: 3 + // get100: + // del4: 4 + // del100: + // replace5: 5 + // replace100: + // min: 0 + // delmin: 0 + // max: 100 + // delmax: 100 + // len: 8 +} + +func TestDeleteMin(t *testing.T) { + tr := New(3) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + for v := tr.DeleteMin(); v != nil; v = tr.DeleteMin() { + got = append(got, v) + } + if want := rang(100); !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } +} + +func TestDeleteMax(t *testing.T) { + tr := New(3) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + for v := tr.DeleteMax(); v != nil; v = tr.DeleteMax() { + got = append(got, v) + } + // Reverse our list. 
+ for i := 0; i < len(got)/2; i++ { + got[i], got[len(got)-i-1] = got[len(got)-i-1], got[i] + } + if want := rang(100); !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } +} + +func TestAscendRange(t *testing.T) { + tr := New(2) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + tr.AscendRange(Int(40), Int(60), func(a Item) bool { + got = append(got, a) + return true + }) + if want := rang(100)[40:60]; !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } + got = got[:0] + tr.AscendRange(Int(40), Int(60), func(a Item) bool { + if a.(Int) > 50 { + return false + } + got = append(got, a) + return true + }) + if want := rang(100)[40:51]; !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } +} + +func TestDescendRange(t *testing.T) { + tr := New(2) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + tr.DescendRange(Int(60), Int(40), func(a Item) bool { + got = append(got, a) + return true + }) + if want := rangrev(100)[39:59]; !reflect.DeepEqual(got, want) { + t.Fatalf("descendrange:\n got: %v\nwant: %v", got, want) + } + got = got[:0] + tr.DescendRange(Int(60), Int(40), func(a Item) bool { + if a.(Int) < 50 { + return false + } + got = append(got, a) + return true + }) + if want := rangrev(100)[39:50]; !reflect.DeepEqual(got, want) { + t.Fatalf("descendrange:\n got: %v\nwant: %v", got, want) + } +} +func TestAscendLessThan(t *testing.T) { + tr := New(*btreeDegree) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + tr.AscendLessThan(Int(60), func(a Item) bool { + got = append(got, a) + return true + }) + if want := rang(100)[:60]; !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } + got = got[:0] + tr.AscendLessThan(Int(60), func(a Item) bool { + if a.(Int) > 50 { + return false + } + got = append(got, a) + return true + }) + if want := rang(100)[:51]; !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } +} + +func TestDescendLessOrEqual(t *testing.T) { + tr := New(*btreeDegree) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + tr.DescendLessOrEqual(Int(40), func(a Item) bool { + got = append(got, a) + return true + }) + if want := rangrev(100)[59:]; !reflect.DeepEqual(got, want) { + t.Fatalf("descendlessorequal:\n got: %v\nwant: %v", got, want) + } + got = got[:0] + tr.DescendLessOrEqual(Int(60), func(a Item) bool { + if a.(Int) < 50 { + return false + } + got = append(got, a) + return true + }) + if want := rangrev(100)[39:50]; !reflect.DeepEqual(got, want) { + t.Fatalf("descendlessorequal:\n got: %v\nwant: %v", got, want) + } +} +func TestAscendGreaterOrEqual(t *testing.T) { + tr := New(*btreeDegree) + for _, v := range perm(100) { + tr.ReplaceOrInsert(v) + } + var got []Item + tr.AscendGreaterOrEqual(Int(40), func(a Item) bool { + got = append(got, a) + return true + }) + if want := rang(100)[40:]; !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } + got = got[:0] + tr.AscendGreaterOrEqual(Int(40), func(a Item) bool { + if a.(Int) > 50 { + return false + } + got = append(got, a) + return true + }) + if want := rang(100)[40:51]; !reflect.DeepEqual(got, want) { + t.Fatalf("ascendrange:\n got: %v\nwant: %v", got, want) + } +} + +func TestDescendGreaterThan(t *testing.T) { + tr := New(*btreeDegree) + for _, v := range perm(100) { + 
tr.ReplaceOrInsert(v) + } + var got []Item + tr.DescendGreaterThan(Int(40), func(a Item) bool { + got = append(got, a) + return true + }) + if want := rangrev(100)[:59]; !reflect.DeepEqual(got, want) { + t.Fatalf("descendgreaterthan:\n got: %v\nwant: %v", got, want) + } + got = got[:0] + tr.DescendGreaterThan(Int(40), func(a Item) bool { + if a.(Int) < 50 { + return false + } + got = append(got, a) + return true + }) + if want := rangrev(100)[:50]; !reflect.DeepEqual(got, want) { + t.Fatalf("descendgreaterthan:\n got: %v\nwant: %v", got, want) + } +} + +const benchmarkTreeSize = 10000 + +func BenchmarkInsert(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + b.StartTimer() + i := 0 + for i < b.N { + tr := New(*btreeDegree) + for _, item := range insertP { + tr.ReplaceOrInsert(item) + i++ + if i >= b.N { + return + } + } + } +} + +func BenchmarkDeleteInsert(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, item := range insertP { + tr.ReplaceOrInsert(item) + } + b.StartTimer() + for i := 0; i < b.N; i++ { + tr.Delete(insertP[i%benchmarkTreeSize]) + tr.ReplaceOrInsert(insertP[i%benchmarkTreeSize]) + } +} + +func BenchmarkDeleteInsertCloneOnce(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, item := range insertP { + tr.ReplaceOrInsert(item) + } + tr = tr.Clone() + b.StartTimer() + for i := 0; i < b.N; i++ { + tr.Delete(insertP[i%benchmarkTreeSize]) + tr.ReplaceOrInsert(insertP[i%benchmarkTreeSize]) + } +} + +func BenchmarkDeleteInsertCloneEachTime(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, item := range insertP { + tr.ReplaceOrInsert(item) + } + b.StartTimer() + for i := 0; i < b.N; i++ { + tr = tr.Clone() + tr.Delete(insertP[i%benchmarkTreeSize]) + tr.ReplaceOrInsert(insertP[i%benchmarkTreeSize]) + } +} + +func BenchmarkDelete(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + removeP := perm(benchmarkTreeSize) + b.StartTimer() + i := 0 + for i < b.N { + b.StopTimer() + tr := New(*btreeDegree) + for _, v := range insertP { + tr.ReplaceOrInsert(v) + } + b.StartTimer() + for _, item := range removeP { + tr.Delete(item) + i++ + if i >= b.N { + return + } + } + if tr.Len() > 0 { + panic(tr.Len()) + } + } +} + +func BenchmarkGet(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + removeP := perm(benchmarkTreeSize) + b.StartTimer() + i := 0 + for i < b.N { + b.StopTimer() + tr := New(*btreeDegree) + for _, v := range insertP { + tr.ReplaceOrInsert(v) + } + b.StartTimer() + for _, item := range removeP { + tr.Get(item) + i++ + if i >= b.N { + return + } + } + } +} + +func BenchmarkGetCloneEachTime(b *testing.B) { + b.StopTimer() + insertP := perm(benchmarkTreeSize) + removeP := perm(benchmarkTreeSize) + b.StartTimer() + i := 0 + for i < b.N { + b.StopTimer() + tr := New(*btreeDegree) + for _, v := range insertP { + tr.ReplaceOrInsert(v) + } + b.StartTimer() + for _, item := range removeP { + tr = tr.Clone() + tr.Get(item) + i++ + if i >= b.N { + return + } + } + } +} + +type byInts []Item + +func (a byInts) Len() int { + return len(a) +} + +func (a byInts) Less(i, j int) bool { + return a[i].(Int) < a[j].(Int) +} + +func (a byInts) Swap(i, j int) { + a[i], a[j] = a[j], a[i] +} + +func BenchmarkAscend(b *testing.B) { + arr := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, v := range arr { + tr.ReplaceOrInsert(v) + } + sort.Sort(byInts(arr)) + b.ResetTimer() + 
for i := 0; i < b.N; i++ { + j := 0 + tr.Ascend(func(item Item) bool { + if item.(Int) != arr[j].(Int) { + b.Fatalf("mismatch: expected: %v, got %v", arr[j].(Int), item.(Int)) + } + j++ + return true + }) + } +} + +func BenchmarkDescend(b *testing.B) { + arr := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, v := range arr { + tr.ReplaceOrInsert(v) + } + sort.Sort(byInts(arr)) + b.ResetTimer() + for i := 0; i < b.N; i++ { + j := len(arr) - 1 + tr.Descend(func(item Item) bool { + if item.(Int) != arr[j].(Int) { + b.Fatalf("mismatch: expected: %v, got %v", arr[j].(Int), item.(Int)) + } + j-- + return true + }) + } +} +func BenchmarkAscendRange(b *testing.B) { + arr := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, v := range arr { + tr.ReplaceOrInsert(v) + } + sort.Sort(byInts(arr)) + b.ResetTimer() + for i := 0; i < b.N; i++ { + j := 100 + tr.AscendRange(Int(100), arr[len(arr)-100], func(item Item) bool { + if item.(Int) != arr[j].(Int) { + b.Fatalf("mismatch: expected: %v, got %v", arr[j].(Int), item.(Int)) + } + j++ + return true + }) + if j != len(arr)-100 { + b.Fatalf("expected: %v, got %v", len(arr)-100, j) + } + } +} + +func BenchmarkDescendRange(b *testing.B) { + arr := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, v := range arr { + tr.ReplaceOrInsert(v) + } + sort.Sort(byInts(arr)) + b.ResetTimer() + for i := 0; i < b.N; i++ { + j := len(arr) - 100 + tr.DescendRange(arr[len(arr)-100], Int(100), func(item Item) bool { + if item.(Int) != arr[j].(Int) { + b.Fatalf("mismatch: expected: %v, got %v", arr[j].(Int), item.(Int)) + } + j-- + return true + }) + if j != 100 { + b.Fatalf("expected: %v, got %v", len(arr)-100, j) + } + } +} +func BenchmarkAscendGreaterOrEqual(b *testing.B) { + arr := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, v := range arr { + tr.ReplaceOrInsert(v) + } + sort.Sort(byInts(arr)) + b.ResetTimer() + for i := 0; i < b.N; i++ { + j := 100 + k := 0 + tr.AscendGreaterOrEqual(Int(100), func(item Item) bool { + if item.(Int) != arr[j].(Int) { + b.Fatalf("mismatch: expected: %v, got %v", arr[j].(Int), item.(Int)) + } + j++ + k++ + return true + }) + if j != len(arr) { + b.Fatalf("expected: %v, got %v", len(arr), j) + } + if k != len(arr)-100 { + b.Fatalf("expected: %v, got %v", len(arr)-100, k) + } + } +} +func BenchmarkDescendLessOrEqual(b *testing.B) { + arr := perm(benchmarkTreeSize) + tr := New(*btreeDegree) + for _, v := range arr { + tr.ReplaceOrInsert(v) + } + sort.Sort(byInts(arr)) + b.ResetTimer() + for i := 0; i < b.N; i++ { + j := len(arr) - 100 + k := len(arr) + tr.DescendLessOrEqual(arr[len(arr)-100], func(item Item) bool { + if item.(Int) != arr[j].(Int) { + b.Fatalf("mismatch: expected: %v, got %v", arr[j].(Int), item.(Int)) + } + j-- + k-- + return true + }) + if j != -1 { + b.Fatalf("expected: %v, got %v", -1, j) + } + if k != 99 { + b.Fatalf("expected: %v, got %v", 99, k) + } + } +} + +const cloneTestSize = 10000 + +func cloneTest(t *testing.T, b *BTree, start int, p []Item, wg *sync.WaitGroup, trees *[]*BTree) { + t.Logf("Starting new clone at %v", start) + *trees = append(*trees, b) + for i := start; i < cloneTestSize; i++ { + b.ReplaceOrInsert(p[i]) + if i%(cloneTestSize/5) == 0 { + wg.Add(1) + go cloneTest(t, b.Clone(), i+1, p, wg, trees) + } + } + wg.Done() +} + +func TestCloneConcurrentOperations(t *testing.T) { + b := New(*btreeDegree) + trees := []*BTree{} + p := perm(cloneTestSize) + var wg sync.WaitGroup + wg.Add(1) + go cloneTest(t, b, 0, p, &wg, &trees) + wg.Wait() + want := 
rang(cloneTestSize) + t.Logf("Starting equality checks on %d trees", len(trees)) + for i, tree := range trees { + if !reflect.DeepEqual(want, all(tree)) { + t.Errorf("tree %v mismatch", i) + } + } + t.Log("Removing half from first half") + toRemove := rang(cloneTestSize)[cloneTestSize/2:] + for i := 0; i < len(trees)/2; i++ { + tree := trees[i] + wg.Add(1) + go func() { + for _, item := range toRemove { + tree.Delete(item) + } + wg.Done() + }() + } + wg.Wait() + t.Log("Checking all values again") + for i, tree := range trees { + var wantpart []Item + if i < len(trees)/2 { + wantpart = want[:cloneTestSize/2] + } else { + wantpart = want + } + if got := all(tree); !reflect.DeepEqual(wantpart, got) { + t.Errorf("tree %v mismatch, want %v got %v", i, len(want), len(got)) + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/.gitignore b/vendor/github.com/googleapis/gnostic/.gitignore new file mode 100644 index 000000000..63149fdda --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/.gitignore @@ -0,0 +1,14 @@ +# Eclipse +.checkstyle +.project +.settings +# Swift +.build +Packages +# vi +*.swp +# vscode +.vscode +.DS_Store +*~ +Package.resolved diff --git a/vendor/github.com/googleapis/gnostic/.travis-install.sh b/vendor/github.com/googleapis/gnostic/.travis-install.sh new file mode 100755 index 000000000..83319ae4c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/.travis-install.sh @@ -0,0 +1,29 @@ +#!/bin/sh + +# +# Install dependencies that aren't available as Ubuntu packages. +# +# Everything goes into $HOME/local. +# +# Scripts should add +# - $HOME/local/bin to PATH +# - $HOME/local/lib to LD_LIBRARY_PATH +# + +cd +mkdir -p local + +# Install swift +SWIFT_URL=https://swift.org/builds/swift-4.0-branch/ubuntu1404/swift-4.0-DEVELOPMENT-SNAPSHOT-2017-09-01-a/swift-4.0-DEVELOPMENT-SNAPSHOT-2017-09-01-a-ubuntu14.04.tar.gz +echo $SWIFT_URL +curl -fSsL $SWIFT_URL -o swift.tar.gz +tar -xzf swift.tar.gz --strip-components=2 --directory=local + +# Install protoc +PROTOC_URL=https://github.com/google/protobuf/releases/download/v3.4.0/protoc-3.4.0-linux-x86_64.zip +echo $PROTOC_URL +curl -fSsL $PROTOC_URL -o protoc.zip +unzip protoc.zip -d local + +# Verify installation +find local diff --git a/vendor/github.com/googleapis/gnostic/.travis.yml b/vendor/github.com/googleapis/gnostic/.travis.yml new file mode 100644 index 000000000..d31126d16 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/.travis.yml @@ -0,0 +1,46 @@ +# Travis CI build file for OpenAPI Compiler, including Go and Swift plugins + +# Use Ubuntu 14.04 +dist: trusty + +sudo: false + +language: go + +addons: + apt: + packages: + - clang-3.8 + - lldb-3.8 + - libicu-dev + - libtool + - libcurl4-openssl-dev + - libbsd-dev + - build-essential + - libssl-dev + - uuid-dev + - curl + - unzip + +install: + - ./.travis-install.sh + - export PATH=.:$HOME/local/bin:$PATH + - make + +script: + - go test . 
-v + - pushd plugins/gnostic-go-generator/examples/v2.0/bookstore + - make test + - popd + - pushd plugins/gnostic-go-generator/examples/v3.0/bookstore + - make test + - popd + - export PATH=.:$HOME/local/bin:$PATH + - export LD_LIBRARY_PATH=$HOME/local/lib + - pushd plugins/gnostic-swift-generator + - make + - cd examples/bookstore + - make + - .build/debug/Server & + - make test + diff --git a/vendor/github.com/googleapis/gnostic/COMPILE-PROTOS.sh b/vendor/github.com/googleapis/gnostic/COMPILE-PROTOS.sh new file mode 100755 index 000000000..017dab670 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/COMPILE-PROTOS.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +go get github.com/golang/protobuf/protoc-gen-go + +protoc \ +--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. \ +OpenAPIv2/OpenAPIv2.proto + +protoc \ +--go_out=:. \ +plugins/plugin.proto + +protoc \ +--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. \ +OpenAPIv3/OpenAPIv3.proto + +protoc \ +--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. \ +discovery/discovery.proto diff --git a/vendor/github.com/googleapis/gnostic/CONTRIBUTING.md b/vendor/github.com/googleapis/gnostic/CONTRIBUTING.md new file mode 100644 index 000000000..6736efd94 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/CONTRIBUTING.md @@ -0,0 +1,35 @@ +# How to become a contributor and submit your own code + +## Contributor License Agreements + +We'd love to accept your sample apps and patches! Before we can take them, we +have to jump a couple of legal hurdles. + +Please fill out either the individual or corporate Contributor License Agreement +(CLA). + + * If you are an individual writing original source code and you're sure you + own the intellectual property, then you'll need to sign an [individual CLA] + (https://developers.google.com/open-source/cla/individual). + * If you work for a company that wants to allow you to contribute your work, + then you'll need to sign a [corporate CLA] + (https://developers.google.com/open-source/cla/corporate). + +Follow either of the two links above to access the appropriate CLA and +instructions for how to sign and return it. Once we receive it, we'll be able to +accept your pull requests. + +## Contributing A Patch + +1. Submit an issue describing your proposed change to the repo in question. +1. The repo owner will respond to your issue promptly. +1. If your proposed change is accepted, and you haven't already done so, sign a + Contributor License Agreement (see details above). +1. Fork the desired repo, develop and test your code changes. +1. Ensure that your code adheres to the existing style in the sample to which + you are contributing. Refer to the + [Google Cloud Platform Samples Style Guide] + (https://github.com/GoogleCloudPlatform/Template/wiki/style.html) for the + recommended coding standards for this organization. +1. 
Ensure that your code has an appropriate set of unit tests which all pass. +1. Submit a pull request. diff --git a/vendor/github.com/googleapis/gnostic/LICENSE b/vendor/github.com/googleapis/gnostic/LICENSE new file mode 100644 index 000000000..6b0b1270f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/LICENSE @@ -0,0 +1,203 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/vendor/github.com/googleapis/gnostic/Makefile b/vendor/github.com/googleapis/gnostic/Makefile new file mode 100644 index 000000000..8a772811e --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/Makefile @@ -0,0 +1,16 @@ + +build: + go get + go install + cd generate-gnostic; go get; go install + cd apps/disco; go get; go install + cd apps/report; go get; go install + cd apps/petstore-builder; go get; go install + cd plugins/gnostic-summary; go get; go install + cd plugins/gnostic-analyze; go get; go install + cd plugins/gnostic-go-generator; go get; go install + rm -f $(GOPATH)/bin/gnostic-go-client $(GOPATH)/bin/gnostic-go-server + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-client + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-server + cd extensions/sample; make + diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.go b/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.go new file mode 100644 index 000000000..0e32451a3 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.go @@ -0,0 +1,8728 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +package openapi_v2 + +import ( + "fmt" + "github.com/googleapis/gnostic/compiler" + "gopkg.in/yaml.v2" + "regexp" + "strings" +) + +// Version returns the package name (and OpenAPI version). +func Version() string { + return "openapi_v2" +} + +// NewAdditionalPropertiesItem creates an object of type AdditionalPropertiesItem if possible, returning an error if not. 
+func NewAdditionalPropertiesItem(in interface{}, context *compiler.Context) (*AdditionalPropertiesItem, error) { + errors := make([]error, 0) + x := &AdditionalPropertiesItem{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", context)) + if matchingError == nil { + x.Oneof = &AdditionalPropertiesItem_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // bool boolean = 2; + boolValue, ok := in.(bool) + if ok { + x.Oneof = &AdditionalPropertiesItem_Boolean{Boolean: boolValue} + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAny creates an object of type Any if possible, returning an error if not. +func NewAny(in interface{}, context *compiler.Context) (*Any, error) { + errors := make([]error, 0) + x := &Any{} + bytes, _ := yaml.Marshal(in) + x.Yaml = string(bytes) + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewApiKeySecurity creates an object of type ApiKeySecurity if possible, returning an error if not. +func NewApiKeySecurity(in interface{}, context *compiler.Context) (*ApiKeySecurity, error) { + errors := make([]error, 0) + x := &ApiKeySecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "in", "name", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [apiKey] + if ok && !compiler.StringArrayContainsValue([]string{"apiKey"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 2; + v2 := compiler.MapValueForKey(m, "name") + if v2 != nil { + x.Name, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 3; + v3 := compiler.MapValueForKey(m, "in") + if v3 != nil { + x.In, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [header query] + if ok && 
!compiler.StringArrayContainsValue([]string{"header", "query"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 5; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewBasicAuthenticationSecurity creates an object of type BasicAuthenticationSecurity if possible, returning an error if not. +func NewBasicAuthenticationSecurity(in interface{}, context *compiler.Context) (*BasicAuthenticationSecurity, error) { + errors := make([]error, 0) + x := &BasicAuthenticationSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [basic] + if ok && !compiler.StringArrayContainsValue([]string{"basic"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok 
{ + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewBodyParameter creates an object of type BodyParameter if possible, returning an error if not. +func NewBodyParameter(in interface{}, context *compiler.Context) (*BodyParameter, error) { + errors := make([]error, 0) + x := &BodyParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "schema"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "in", "name", "required", "schema"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 2; + v2 := compiler.MapValueForKey(m, "name") + if v2 != nil { + x.Name, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 3; + v3 := compiler.MapValueForKey(m, "in") + if v3 != nil { + x.In, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [body] + if ok && !compiler.StringArrayContainsValue([]string{"body"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 4; + v4 := compiler.MapValueForKey(m, "required") + if v4 != nil { + x.Required, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema schema = 5; + v5 := compiler.MapValueForKey(m, "schema") + if v5 != nil { + var err error + x.Schema, err = NewSchema(v5, compiler.NewContext("schema", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok 
:= compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewContact creates an object of type Contact if possible, returning an error if not. +func NewContact(in interface{}, context *compiler.Context) (*Contact, error) { + errors := make([]error, 0) + x := &Contact{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"email", "name", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string email = 3; + v3 := compiler.MapValueForKey(m, "email") + if v3 != nil { + x.Email, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for email: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 4; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefault creates an object of type Default if possible, returning an error if not. 
+func NewDefault(in interface{}, context *compiler.Context) (*Default, error) { + errors := make([]error, 0) + x := &Default{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefinitions creates an object of type Definitions if possible, returning an error if not. +func NewDefinitions(in interface{}, context *compiler.Context) (*Definitions, error) { + errors := make([]error, 0) + x := &Definitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDocument creates an object of type Document if possible, returning an error if not. 
+func NewDocument(in interface{}, context *compiler.Context) (*Document, error) { + errors := make([]error, 0) + x := &Document{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"info", "paths", "swagger"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"basePath", "consumes", "definitions", "externalDocs", "host", "info", "parameters", "paths", "produces", "responses", "schemes", "security", "securityDefinitions", "swagger", "tags"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string swagger = 1; + v1 := compiler.MapValueForKey(m, "swagger") + if v1 != nil { + x.Swagger, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for swagger: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [2.0] + if ok && !compiler.StringArrayContainsValue([]string{"2.0"}, x.Swagger) { + message := fmt.Sprintf("has unexpected value for swagger: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Info info = 2; + v2 := compiler.MapValueForKey(m, "info") + if v2 != nil { + var err error + x.Info, err = NewInfo(v2, compiler.NewContext("info", context)) + if err != nil { + errors = append(errors, err) + } + } + // string host = 3; + v3 := compiler.MapValueForKey(m, "host") + if v3 != nil { + x.Host, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for host: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string base_path = 4; + v4 := compiler.MapValueForKey(m, "basePath") + if v4 != nil { + x.BasePath, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for basePath: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string schemes = 5; + v5 := compiler.MapValueForKey(m, "schemes") + if v5 != nil { + v, ok := v5.([]interface{}) + if ok { + x.Schemes = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for schemes: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [http https ws wss] + if ok && !compiler.StringArrayContainsValues([]string{"http", "https", "ws", "wss"}, x.Schemes) { + message := fmt.Sprintf("has unexpected value for schemes: %+v", v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string consumes = 6; + v6 := compiler.MapValueForKey(m, "consumes") + if v6 != nil { + v, ok := v6.([]interface{}) + if ok { + x.Consumes = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for consumes: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
repeated string produces = 7; + v7 := compiler.MapValueForKey(m, "produces") + if v7 != nil { + v, ok := v7.([]interface{}) + if ok { + x.Produces = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for produces: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Paths paths = 8; + v8 := compiler.MapValueForKey(m, "paths") + if v8 != nil { + var err error + x.Paths, err = NewPaths(v8, compiler.NewContext("paths", context)) + if err != nil { + errors = append(errors, err) + } + } + // Definitions definitions = 9; + v9 := compiler.MapValueForKey(m, "definitions") + if v9 != nil { + var err error + x.Definitions, err = NewDefinitions(v9, compiler.NewContext("definitions", context)) + if err != nil { + errors = append(errors, err) + } + } + // ParameterDefinitions parameters = 10; + v10 := compiler.MapValueForKey(m, "parameters") + if v10 != nil { + var err error + x.Parameters, err = NewParameterDefinitions(v10, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + } + // ResponseDefinitions responses = 11; + v11 := compiler.MapValueForKey(m, "responses") + if v11 != nil { + var err error + x.Responses, err = NewResponseDefinitions(v11, compiler.NewContext("responses", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated SecurityRequirement security = 12; + v12 := compiler.MapValueForKey(m, "security") + if v12 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := v12.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // SecurityDefinitions security_definitions = 13; + v13 := compiler.MapValueForKey(m, "securityDefinitions") + if v13 != nil { + var err error + x.SecurityDefinitions, err = NewSecurityDefinitions(v13, compiler.NewContext("securityDefinitions", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Tag tags = 14; + v14 := compiler.MapValueForKey(m, "tags") + if v14 != nil { + // repeated Tag + x.Tags = make([]*Tag, 0) + a, ok := v14.([]interface{}) + if ok { + for _, item := range a { + y, err := NewTag(item, compiler.NewContext("tags", context)) + if err != nil { + errors = append(errors, err) + } + x.Tags = append(x.Tags, y) + } + } + } + // ExternalDocs external_docs = 15; + v15 := compiler.MapValueForKey(m, "externalDocs") + if v15 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v15, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 16; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + 
return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExamples creates an object of type Examples if possible, returning an error if not. +func NewExamples(in interface{}, context *compiler.Context) (*Examples, error) { + errors := make([]error, 0) + x := &Examples{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExternalDocs creates an object of type ExternalDocs if possible, returning an error if not. +func NewExternalDocs(in interface{}, context *compiler.Context) (*ExternalDocs, error) { + errors := make([]error, 0) + x := &ExternalDocs{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, 
compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewFileSchema creates an object of type FileSchema if possible, returning an error if not. +func NewFileSchema(in interface{}, context *compiler.Context) (*FileSchema, error) { + errors := make([]error, 0) + x := &FileSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"default", "description", "example", "externalDocs", "format", "readOnly", "required", "title", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string format = 1; + v1 := compiler.MapValueForKey(m, "format") + if v1 != nil { + x.Format, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 2; + v2 := compiler.MapValueForKey(m, "title") + if v2 != nil { + x.Title, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 4; + v4 := compiler.MapValueForKey(m, "default") + if v4 != nil { + var err error + x.Default, err = NewAny(v4, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string required = 5; + v5 := compiler.MapValueForKey(m, "required") + if v5 != nil { + v, ok := v5.([]interface{}) + if ok { + x.Required = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [file] + if ok && !compiler.StringArrayContainsValue([]string{"file"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool read_only = 7; + v7 := compiler.MapValueForKey(m, "readOnly") + if v7 != nil { + x.ReadOnly, ok = v7.(bool) + if !ok { + message := fmt.Sprintf("has 
unexpected value for readOnly: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 8; + v8 := compiler.MapValueForKey(m, "externalDocs") + if v8 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v8, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 9; + v9 := compiler.MapValueForKey(m, "example") + if v9 != nil { + var err error + x.Example, err = NewAny(v9, compiler.NewContext("example", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 10; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewFormDataParameterSubSchema creates an object of type FormDataParameterSubSchema if possible, returning an error if not. +func NewFormDataParameterSubSchema(in interface{}, context *compiler.Context) (*FormDataParameterSubSchema, error) { + errors := make([]error, 0) + x := &FormDataParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [formData] + if ok && !compiler.StringArrayContainsValue([]string{"formData"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has 
unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 5; + v5 := compiler.MapValueForKey(m, "allowEmptyValue") + if v5 != nil { + x.AllowEmptyValue, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array file] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array", "file"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := compiler.MapValueForKey(m, "format") + if v7 != nil { + x.Format, ok = v7.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 8; + v8 := compiler.MapValueForKey(m, "items") + if v8 != nil { + var err error + x.Items, err = NewPrimitivesItems(v8, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 9; + v9 := compiler.MapValueForKey(m, "collectionFormat") + if v9 != nil { + x.CollectionFormat, ok = v9.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes multi] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes", "multi"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 10; + v10 := compiler.MapValueForKey(m, "default") + if v10 != nil { + var err error + x.Default, err = NewAny(v10, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + switch v11 := v11.(type) { + case float64: + x.Maximum = v11 + case float32: + x.Maximum = float64(v11) + case uint64: + x.Maximum = float64(v11) + case uint32: + x.Maximum = float64(v11) + case int64: + x.Maximum = float64(v11) + case int32: + x.Maximum = float64(v11) + case int: + x.Maximum = float64(v11) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = v12.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v12, v12) + errors = 
append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + switch v13 := v13.(type) { + case float64: + x.Minimum = v13 + case float32: + x.Minimum = float64(v13) + case uint64: + x.Minimum = float64(v13) + case uint32: + x.Minimum = float64(v13) + case int64: + x.Minimum = float64(v13) + case int32: + x.Minimum = float64(v13) + case int: + x.Minimum = float64(v13) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = v14.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := v15.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := v16.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, "pattern") + if v17 != nil { + x.Pattern, ok = v17.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := v18.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := v19.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = v20.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v20, v20) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 21; + v21 := compiler.MapValueForKey(m, "enum") + if v21 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v21.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 22; + v22 := compiler.MapValueForKey(m, "multipleOf") + if v22 != nil { + switch v22 := v22.(type) { + case float64: + x.MultipleOf = v22 + case float32: + x.MultipleOf = float64(v22) + case uint64: + x.MultipleOf = float64(v22) + case uint32: + x.MultipleOf = float64(v22) + case int64: + x.MultipleOf = float64(v22) + case int32: + x.MultipleOf = float64(v22) + case int: + 
x.MultipleOf = float64(v22) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v22, v22) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 23; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeader creates an object of type Header if possible, returning an error if not. +func NewHeader(in interface{}, context *compiler.Context) (*Header, error) { + errors := make([]error, 0) + x := &Header{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "pattern", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number integer boolean array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "integer", "boolean", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 3; + v3 := compiler.MapValueForKey(m, "items") + if v3 != nil { + var err error + x.Items, err = NewPrimitivesItems(v3, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 4; + v4 := compiler.MapValueForKey(m, "collectionFormat") + if v4 
!= nil { + x.CollectionFormat, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 6; + v6 := compiler.MapValueForKey(m, "maximum") + if v6 != nil { + switch v6 := v6.(type) { + case float64: + x.Maximum = v6 + case float32: + x.Maximum = float64(v6) + case uint64: + x.Maximum = float64(v6) + case uint32: + x.Maximum = float64(v6) + case int64: + x.Maximum = float64(v6) + case int32: + x.Maximum = float64(v6) + case int: + x.Maximum = float64(v6) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 7; + v7 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v7 != nil { + x.ExclusiveMaximum, ok = v7.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + switch v8 := v8.(type) { + case float64: + x.Minimum = v8 + case float32: + x.Minimum = float64(v8) + case uint64: + x.Minimum = float64(v8) + case uint32: + x.Minimum = float64(v8) + case int64: + x.Minimum = float64(v8) + case int32: + x.Minimum = float64(v8) + case int: + x.Minimum = float64(v8) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 9; + v9 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v9 != nil { + x.ExclusiveMinimum, ok = v9.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 10; + v10 := compiler.MapValueForKey(m, "maxLength") + if v10 != nil { + t, ok := v10.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 11; + v11 := compiler.MapValueForKey(m, "minLength") + if v11 != nil { + t, ok := v11.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 12; + v12 := compiler.MapValueForKey(m, "pattern") + if v12 != nil { + x.Pattern, ok = v12.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 13; + v13 := compiler.MapValueForKey(m, "maxItems") + if v13 != nil { + t, ok := v13.(int) + if ok { + x.MaxItems = 
int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 14; + v14 := compiler.MapValueForKey(m, "minItems") + if v14 != nil { + t, ok := v14.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 15; + v15 := compiler.MapValueForKey(m, "uniqueItems") + if v15 != nil { + x.UniqueItems, ok = v15.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 16; + v16 := compiler.MapValueForKey(m, "enum") + if v16 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v16.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 17; + v17 := compiler.MapValueForKey(m, "multipleOf") + if v17 != nil { + switch v17 := v17.(type) { + case float64: + x.MultipleOf = v17 + case float32: + x.MultipleOf = float64(v17) + case uint64: + x.MultipleOf = float64(v17) + case uint32: + x.MultipleOf = float64(v17) + case int64: + x.MultipleOf = float64(v17) + case int32: + x.MultipleOf = float64(v17) + case int: + x.MultipleOf = float64(v17) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 18; + v18 := compiler.MapValueForKey(m, "description") + if v18 != nil { + x.Description, ok = v18.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 19; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaderParameterSubSchema creates an object of type HeaderParameterSubSchema if possible, returning an error if not. 
+func NewHeaderParameterSubSchema(in interface{}, context *compiler.Context) (*HeaderParameterSubSchema, error) { + errors := make([]error, 0) + x := &HeaderParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [header] + if ok && !compiler.StringArrayContainsValue([]string{"header"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 5; + v5 := compiler.MapValueForKey(m, "type") + if v5 != nil { + x.Type, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 7; + v7 := compiler.MapValueForKey(m, "items") + if v7 != nil { + var err error + x.Items, err = NewPrimitivesItems(v7, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 8; + v8 := compiler.MapValueForKey(m, 
"collectionFormat") + if v8 != nil { + x.CollectionFormat, ok = v8.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 9; + v9 := compiler.MapValueForKey(m, "default") + if v9 != nil { + var err error + x.Default, err = NewAny(v9, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + switch v10 := v10.(type) { + case float64: + x.Maximum = v10 + case float32: + x.Maximum = float64(v10) + case uint64: + x.Maximum = float64(v10) + case uint32: + x.Maximum = float64(v10) + case int64: + x.Maximum = float64(v10) + case int32: + x.Maximum = float64(v10) + case int: + x.Maximum = float64(v10) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 11; + v11 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v11 != nil { + x.ExclusiveMaximum, ok = v11.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 12; + v12 := compiler.MapValueForKey(m, "minimum") + if v12 != nil { + switch v12 := v12.(type) { + case float64: + x.Minimum = v12 + case float32: + x.Minimum = float64(v12) + case uint64: + x.Minimum = float64(v12) + case uint32: + x.Minimum = float64(v12) + case int64: + x.Minimum = float64(v12) + case int32: + x.Minimum = float64(v12) + case int: + x.Minimum = float64(v12) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 13; + v13 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v13 != nil { + x.ExclusiveMinimum, ok = v13.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 14; + v14 := compiler.MapValueForKey(m, "maxLength") + if v14 != nil { + t, ok := v14.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 15; + v15 := compiler.MapValueForKey(m, "minLength") + if v15 != nil { + t, ok := v15.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 16; + v16 := compiler.MapValueForKey(m, "pattern") + if v16 != nil { + x.Pattern, ok = v16.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 17; + v17 := compiler.MapValueForKey(m, "maxItems") + 
if v17 != nil { + t, ok := v17.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 18; + v18 := compiler.MapValueForKey(m, "minItems") + if v18 != nil { + t, ok := v18.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 19; + v19 := compiler.MapValueForKey(m, "uniqueItems") + if v19 != nil { + x.UniqueItems, ok = v19.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v20.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 21; + v21 := compiler.MapValueForKey(m, "multipleOf") + if v21 != nil { + switch v21 := v21.(type) { + case float64: + x.MultipleOf = v21 + case float32: + x.MultipleOf = float64(v21) + case uint64: + x.MultipleOf = float64(v21) + case uint32: + x.MultipleOf = float64(v21) + case int64: + x.MultipleOf = float64(v21) + case int32: + x.MultipleOf = float64(v21) + case int: + x.MultipleOf = float64(v21) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v21, v21) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 22; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaders creates an object of type Headers if possible, returning an error if not. 
+func NewHeaders(in interface{}, context *compiler.Context) (*Headers, error) { + errors := make([]error, 0) + x := &Headers{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedHeader additional_properties = 1; + // MAP: Header + x.AdditionalProperties = make([]*NamedHeader, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedHeader{} + pair.Name = k + var err error + pair.Value, err = NewHeader(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewInfo creates an object of type Info if possible, returning an error if not. +func NewInfo(in interface{}, context *compiler.Context) (*Info, error) { + errors := make([]error, 0) + x := &Info{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"title", "version"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"contact", "description", "license", "termsOfService", "title", "version"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string title = 1; + v1 := compiler.MapValueForKey(m, "title") + if v1 != nil { + x.Title, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string version = 2; + v2 := compiler.MapValueForKey(m, "version") + if v2 != nil { + x.Version, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for version: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string terms_of_service = 4; + v4 := compiler.MapValueForKey(m, "termsOfService") + if v4 != nil { + x.TermsOfService, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for termsOfService: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Contact contact = 5; + v5 := compiler.MapValueForKey(m, "contact") + if v5 != nil { + var err error + x.Contact, err = NewContact(v5, compiler.NewContext("contact", context)) + if err != nil { + errors = append(errors, err) + } + } + // License license = 6; + v6 := compiler.MapValueForKey(m, "license") + if v6 != nil { + var err error + x.License, err = NewLicense(v6, 
compiler.NewContext("license", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 7; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewItemsItem creates an object of type ItemsItem if possible, returning an error if not. +func NewItemsItem(in interface{}, context *compiler.Context) (*ItemsItem, error) { + errors := make([]error, 0) + x := &ItemsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value for item array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.Schema = make([]*Schema, 0) + y, err := NewSchema(m, compiler.NewContext("", context)) + if err != nil { + return nil, err + } + x.Schema = append(x.Schema, y) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewJsonReference creates an object of type JsonReference if possible, returning an error if not. +func NewJsonReference(in interface{}, context *compiler.Context) (*JsonReference, error) { + errors := make([]error, 0) + x := &JsonReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"$ref"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"$ref", "description"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLicense creates an object of type License if possible, returning an error if not. 
+func NewLicense(in interface{}, context *compiler.Context) (*License, error) { + errors := make([]error, 0) + x := &License{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"name", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedAny creates an object of type NamedAny if possible, returning an error if not. 
+func NewNamedAny(in interface{}, context *compiler.Context) (*NamedAny, error) { + errors := make([]error, 0) + x := &NamedAny{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewAny(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedHeader creates an object of type NamedHeader if possible, returning an error if not. +func NewNamedHeader(in interface{}, context *compiler.Context) (*NamedHeader, error) { + errors := make([]error, 0) + x := &NamedHeader{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Header value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewHeader(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedParameter creates an object of type NamedParameter if possible, returning an error if not. 
+func NewNamedParameter(in interface{}, context *compiler.Context) (*NamedParameter, error) { + errors := make([]error, 0) + x := &NamedParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Parameter value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewParameter(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedPathItem creates an object of type NamedPathItem if possible, returning an error if not. +func NewNamedPathItem(in interface{}, context *compiler.Context) (*NamedPathItem, error) { + errors := make([]error, 0) + x := &NamedPathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PathItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewPathItem(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponse creates an object of type NamedResponse if possible, returning an error if not. 
+func NewNamedResponse(in interface{}, context *compiler.Context) (*NamedResponse, error) { + errors := make([]error, 0) + x := &NamedResponse{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Response value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponse(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponseValue creates an object of type NamedResponseValue if possible, returning an error if not. +func NewNamedResponseValue(in interface{}, context *compiler.Context) (*NamedResponseValue, error) { + errors := make([]error, 0) + x := &NamedResponseValue{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ResponseValue value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponseValue(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSchema creates an object of type NamedSchema if possible, returning an error if not. 
+func NewNamedSchema(in interface{}, context *compiler.Context) (*NamedSchema, error) { + errors := make([]error, 0) + x := &NamedSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSchema(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSecurityDefinitionsItem creates an object of type NamedSecurityDefinitionsItem if possible, returning an error if not. +func NewNamedSecurityDefinitionsItem(in interface{}, context *compiler.Context) (*NamedSecurityDefinitionsItem, error) { + errors := make([]error, 0) + x := &NamedSecurityDefinitionsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SecurityDefinitionsItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSecurityDefinitionsItem(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedString creates an object of type NamedString if possible, returning an error if not. 
+func NewNamedString(in interface{}, context *compiler.Context) (*NamedString, error) { + errors := make([]error, 0) + x := &NamedString{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + x.Value, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for value: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedStringArray creates an object of type NamedStringArray if possible, returning an error if not. +func NewNamedStringArray(in interface{}, context *compiler.Context) (*NamedStringArray, error) { + errors := make([]error, 0) + x := &NamedStringArray{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // StringArray value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewStringArray(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNonBodyParameter creates an object of type NonBodyParameter if possible, returning an error if not. 
+func NewNonBodyParameter(in interface{}, context *compiler.Context) (*NonBodyParameter, error) { + errors := make([]error, 0) + x := &NonBodyParameter{} + matched := false + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // HeaderParameterSubSchema header_parameter_sub_schema = 1; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewHeaderParameterSubSchema(m, compiler.NewContext("headerParameterSubSchema", context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_HeaderParameterSubSchema{HeaderParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // FormDataParameterSubSchema form_data_parameter_sub_schema = 2; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewFormDataParameterSubSchema(m, compiler.NewContext("formDataParameterSubSchema", context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_FormDataParameterSubSchema{FormDataParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // QueryParameterSubSchema query_parameter_sub_schema = 3; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewQueryParameterSubSchema(m, compiler.NewContext("queryParameterSubSchema", context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_QueryParameterSubSchema{QueryParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // PathParameterSubSchema path_parameter_sub_schema = 4; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewPathParameterSubSchema(m, compiler.NewContext("pathParameterSubSchema", context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_PathParameterSubSchema{PathParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2AccessCodeSecurity creates an object of type Oauth2AccessCodeSecurity if possible, returning an error if not. 
+func NewOauth2AccessCodeSecurity(in interface{}, context *compiler.Context) (*Oauth2AccessCodeSecurity, error) { + errors := make([]error, 0) + x := &Oauth2AccessCodeSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"authorizationUrl", "flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"authorizationUrl", "description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [accessCode] + if ok && !compiler.StringArrayContainsValue([]string{"accessCode"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", context)) + if err != nil { + errors = append(errors, err) + } + } + // string authorization_url = 4; + v4 := compiler.MapValueForKey(m, "authorizationUrl") + if v4 != nil { + x.AuthorizationUrl, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string token_url = 5; + v5 := compiler.MapValueForKey(m, "tokenUrl") + if v5 != nil { + x.TokenUrl, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 6; + v6 := compiler.MapValueForKey(m, "description") + if v6 != nil { + x.Description, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 7; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := 
compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2ApplicationSecurity creates an object of type Oauth2ApplicationSecurity if possible, returning an error if not. +func NewOauth2ApplicationSecurity(in interface{}, context *compiler.Context) (*Oauth2ApplicationSecurity, error) { + errors := make([]error, 0) + x := &Oauth2ApplicationSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [application] + if ok && !compiler.StringArrayContainsValue([]string{"application"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", context)) + if err != nil { + errors = append(errors, err) + } + } + // string token_url = 4; + v4 := compiler.MapValueForKey(m, "tokenUrl") + if v4 != nil { + x.TokenUrl, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string 
description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2ImplicitSecurity creates an object of type Oauth2ImplicitSecurity if possible, returning an error if not. +func NewOauth2ImplicitSecurity(in interface{}, context *compiler.Context) (*Oauth2ImplicitSecurity, error) { + errors := make([]error, 0) + x := &Oauth2ImplicitSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"authorizationUrl", "flow", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"authorizationUrl", "description", "flow", "scopes", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [implicit] + if ok && !compiler.StringArrayContainsValue([]string{"implicit"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = 
NewOauth2Scopes(v3, compiler.NewContext("scopes", context)) + if err != nil { + errors = append(errors, err) + } + } + // string authorization_url = 4; + v4 := compiler.MapValueForKey(m, "authorizationUrl") + if v4 != nil { + x.AuthorizationUrl, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2PasswordSecurity creates an object of type Oauth2PasswordSecurity if possible, returning an error if not. +func NewOauth2PasswordSecurity(in interface{}, context *compiler.Context) (*Oauth2PasswordSecurity, error) { + errors := make([]error, 0) + x := &Oauth2PasswordSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, 
compiler.NewError(context, message)) + } + // check for valid enum values + // [password] + if ok && !compiler.StringArrayContainsValue([]string{"password"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", context)) + if err != nil { + errors = append(errors, err) + } + } + // string token_url = 4; + v4 := compiler.MapValueForKey(m, "tokenUrl") + if v4 != nil { + x.TokenUrl, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2Scopes creates an object of type Oauth2Scopes if possible, returning an error if not. +func NewOauth2Scopes(in interface{}, context *compiler.Context) (*Oauth2Scopes, error) { + errors := make([]error, 0) + x := &Oauth2Scopes{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedString additional_properties = 1; + // MAP: string + x.AdditionalProperties = make([]*NamedString, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedString{} + pair.Name = k + pair.Value = v.(string) + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOperation creates an object of type Operation if possible, returning an error if not. 
+func NewOperation(in interface{}, context *compiler.Context) (*Operation, error) { + errors := make([]error, 0) + x := &Operation{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"responses"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"consumes", "deprecated", "description", "externalDocs", "operationId", "parameters", "produces", "responses", "schemes", "security", "summary", "tags"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string tags = 1; + v1 := compiler.MapValueForKey(m, "tags") + if v1 != nil { + v, ok := v1.([]interface{}) + if ok { + x.Tags = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for tags: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 4; + v4 := compiler.MapValueForKey(m, "externalDocs") + if v4 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v4, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // string operation_id = 5; + v5 := compiler.MapValueForKey(m, "operationId") + if v5 != nil { + x.OperationId, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string produces = 6; + v6 := compiler.MapValueForKey(m, "produces") + if v6 != nil { + v, ok := v6.([]interface{}) + if ok { + x.Produces = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for produces: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string consumes = 7; + v7 := compiler.MapValueForKey(m, "consumes") + if v7 != nil { + v, ok := v7.([]interface{}) + if ok { + x.Consumes = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for consumes: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated ParametersItem parameters = 8; + v8 := compiler.MapValueForKey(m, "parameters") + if v8 != nil { + // repeated ParametersItem + x.Parameters = 
make([]*ParametersItem, 0) + a, ok := v8.([]interface{}) + if ok { + for _, item := range a { + y, err := NewParametersItem(item, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // Responses responses = 9; + v9 := compiler.MapValueForKey(m, "responses") + if v9 != nil { + var err error + x.Responses, err = NewResponses(v9, compiler.NewContext("responses", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string schemes = 10; + v10 := compiler.MapValueForKey(m, "schemes") + if v10 != nil { + v, ok := v10.([]interface{}) + if ok { + x.Schemes = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for schemes: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [http https ws wss] + if ok && !compiler.StringArrayContainsValues([]string{"http", "https", "ws", "wss"}, x.Schemes) { + message := fmt.Sprintf("has unexpected value for schemes: %+v", v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 11; + v11 := compiler.MapValueForKey(m, "deprecated") + if v11 != nil { + x.Deprecated, ok = v11.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SecurityRequirement security = 12; + v12 := compiler.MapValueForKey(m, "security") + if v12 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := v12.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // repeated NamedAny vendor_extension = 13; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameter creates an object of type Parameter if possible, returning an error if not. 
+func NewParameter(in interface{}, context *compiler.Context) (*Parameter, error) { + errors := make([]error, 0) + x := &Parameter{} + matched := false + // BodyParameter body_parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewBodyParameter(m, compiler.NewContext("bodyParameter", context)) + if matchingError == nil { + x.Oneof = &Parameter_BodyParameter{BodyParameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // NonBodyParameter non_body_parameter = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewNonBodyParameter(m, compiler.NewContext("nonBodyParameter", context)) + if matchingError == nil { + x.Oneof = &Parameter_NonBodyParameter{NonBodyParameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameterDefinitions creates an object of type ParameterDefinitions if possible, returning an error if not. +func NewParameterDefinitions(in interface{}, context *compiler.Context) (*ParameterDefinitions, error) { + errors := make([]error, 0) + x := &ParameterDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedParameter additional_properties = 1; + // MAP: Parameter + x.AdditionalProperties = make([]*NamedParameter, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedParameter{} + pair.Name = k + var err error + pair.Value, err = NewParameter(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParametersItem creates an object of type ParametersItem if possible, returning an error if not. 
+func NewParametersItem(in interface{}, context *compiler.Context) (*ParametersItem, error) { + errors := make([]error, 0) + x := &ParametersItem{} + matched := false + // Parameter parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewParameter(m, compiler.NewContext("parameter", context)) + if matchingError == nil { + x.Oneof = &ParametersItem_Parameter{Parameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // JsonReference json_reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewJsonReference(m, compiler.NewContext("jsonReference", context)) + if matchingError == nil { + x.Oneof = &ParametersItem_JsonReference{JsonReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathItem creates an object of type PathItem if possible, returning an error if not. +func NewPathItem(in interface{}, context *compiler.Context) (*PathItem, error) { + errors := make([]error, 0) + x := &PathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "delete", "get", "head", "options", "parameters", "patch", "post", "put"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Operation get = 2; + v2 := compiler.MapValueForKey(m, "get") + if v2 != nil { + var err error + x.Get, err = NewOperation(v2, compiler.NewContext("get", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation put = 3; + v3 := compiler.MapValueForKey(m, "put") + if v3 != nil { + var err error + x.Put, err = NewOperation(v3, compiler.NewContext("put", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation post = 4; + v4 := compiler.MapValueForKey(m, "post") + if v4 != nil { + var err error + x.Post, err = NewOperation(v4, compiler.NewContext("post", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation delete = 5; + v5 := compiler.MapValueForKey(m, "delete") + if v5 != nil { + var err error + x.Delete, err = NewOperation(v5, compiler.NewContext("delete", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation options = 6; + v6 := compiler.MapValueForKey(m, "options") + if v6 != nil { + var err error + x.Options, err = NewOperation(v6, compiler.NewContext("options", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation head = 7; + v7 := compiler.MapValueForKey(m, "head") + if v7 != nil { + var err error 
+ x.Head, err = NewOperation(v7, compiler.NewContext("head", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation patch = 8; + v8 := compiler.MapValueForKey(m, "patch") + if v8 != nil { + var err error + x.Patch, err = NewOperation(v8, compiler.NewContext("patch", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated ParametersItem parameters = 9; + v9 := compiler.MapValueForKey(m, "parameters") + if v9 != nil { + // repeated ParametersItem + x.Parameters = make([]*ParametersItem, 0) + a, ok := v9.([]interface{}) + if ok { + for _, item := range a { + y, err := NewParametersItem(item, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // repeated NamedAny vendor_extension = 10; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathParameterSubSchema creates an object of type PathParameterSubSchema if possible, returning an error if not. +func NewPathParameterSubSchema(in interface{}, context *compiler.Context) (*PathParameterSubSchema, error) { + errors := make([]error, 0) + x := &PathParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"required"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, 
compiler.NewError(context, message)) + } + // check for valid enum values + // [path] + if ok && !compiler.StringArrayContainsValue([]string{"path"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 5; + v5 := compiler.MapValueForKey(m, "type") + if v5 != nil { + x.Type, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 7; + v7 := compiler.MapValueForKey(m, "items") + if v7 != nil { + var err error + x.Items, err = NewPrimitivesItems(v7, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 8; + v8 := compiler.MapValueForKey(m, "collectionFormat") + if v8 != nil { + x.CollectionFormat, ok = v8.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 9; + v9 := compiler.MapValueForKey(m, "default") + if v9 != nil { + var err error + x.Default, err = NewAny(v9, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + switch v10 := v10.(type) { + case float64: + x.Maximum = v10 + case float32: + x.Maximum = float64(v10) + case uint64: + x.Maximum = float64(v10) + case uint32: + x.Maximum = float64(v10) + case int64: + x.Maximum = float64(v10) + case int32: + x.Maximum = float64(v10) + case int: + x.Maximum = float64(v10) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 11; + v11 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v11 != nil { + x.ExclusiveMaximum, ok = 
v11.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 12; + v12 := compiler.MapValueForKey(m, "minimum") + if v12 != nil { + switch v12 := v12.(type) { + case float64: + x.Minimum = v12 + case float32: + x.Minimum = float64(v12) + case uint64: + x.Minimum = float64(v12) + case uint32: + x.Minimum = float64(v12) + case int64: + x.Minimum = float64(v12) + case int32: + x.Minimum = float64(v12) + case int: + x.Minimum = float64(v12) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 13; + v13 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v13 != nil { + x.ExclusiveMinimum, ok = v13.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 14; + v14 := compiler.MapValueForKey(m, "maxLength") + if v14 != nil { + t, ok := v14.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 15; + v15 := compiler.MapValueForKey(m, "minLength") + if v15 != nil { + t, ok := v15.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 16; + v16 := compiler.MapValueForKey(m, "pattern") + if v16 != nil { + x.Pattern, ok = v16.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 17; + v17 := compiler.MapValueForKey(m, "maxItems") + if v17 != nil { + t, ok := v17.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 18; + v18 := compiler.MapValueForKey(m, "minItems") + if v18 != nil { + t, ok := v18.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 19; + v19 := compiler.MapValueForKey(m, "uniqueItems") + if v19 != nil { + x.UniqueItems, ok = v19.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v20.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 21; + v21 := compiler.MapValueForKey(m, "multipleOf") + if v21 != nil { + switch v21 := v21.(type) { + case float64: + x.MultipleOf = v21 + case float32: + x.MultipleOf = float64(v21) + case uint64: + x.MultipleOf = float64(v21) + case uint32: + x.MultipleOf = 
float64(v21) + case int64: + x.MultipleOf = float64(v21) + case int32: + x.MultipleOf = float64(v21) + case int: + x.MultipleOf = float64(v21) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v21, v21) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 22; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPaths creates an object of type Paths if possible, returning an error if not. +func NewPaths(in interface{}, context *compiler.Context) (*Paths, error) { + errors := make([]error, 0) + x := &Paths{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern0, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedAny vendor_extension = 1; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + // repeated NamedPathItem path = 2; + // MAP: PathItem ^/ + x.Path = make([]*NamedPathItem, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "/") { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPrimitivesItems creates an object of type PrimitivesItems if possible, returning an error if not. 
+func NewPrimitivesItems(in interface{}, context *compiler.Context) (*PrimitivesItems, error) { + errors := make([]error, 0) + x := &PrimitivesItems{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"collectionFormat", "default", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "pattern", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number integer boolean array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "integer", "boolean", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 3; + v3 := compiler.MapValueForKey(m, "items") + if v3 != nil { + var err error + x.Items, err = NewPrimitivesItems(v3, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 4; + v4 := compiler.MapValueForKey(m, "collectionFormat") + if v4 != nil { + x.CollectionFormat, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 6; + v6 := compiler.MapValueForKey(m, "maximum") + if v6 != nil { + switch v6 := v6.(type) { + case float64: + x.Maximum = v6 + case float32: + x.Maximum = float64(v6) + case uint64: + x.Maximum = float64(v6) + case uint32: + x.Maximum = float64(v6) + case int64: + x.Maximum = float64(v6) + case int32: + x.Maximum = float64(v6) + case int: + x.Maximum = float64(v6) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 7; + v7 := compiler.MapValueForKey(m, "exclusiveMaximum") + 
if v7 != nil { + x.ExclusiveMaximum, ok = v7.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + switch v8 := v8.(type) { + case float64: + x.Minimum = v8 + case float32: + x.Minimum = float64(v8) + case uint64: + x.Minimum = float64(v8) + case uint32: + x.Minimum = float64(v8) + case int64: + x.Minimum = float64(v8) + case int32: + x.Minimum = float64(v8) + case int: + x.Minimum = float64(v8) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 9; + v9 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v9 != nil { + x.ExclusiveMinimum, ok = v9.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 10; + v10 := compiler.MapValueForKey(m, "maxLength") + if v10 != nil { + t, ok := v10.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 11; + v11 := compiler.MapValueForKey(m, "minLength") + if v11 != nil { + t, ok := v11.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 12; + v12 := compiler.MapValueForKey(m, "pattern") + if v12 != nil { + x.Pattern, ok = v12.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 13; + v13 := compiler.MapValueForKey(m, "maxItems") + if v13 != nil { + t, ok := v13.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 14; + v14 := compiler.MapValueForKey(m, "minItems") + if v14 != nil { + t, ok := v14.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 15; + v15 := compiler.MapValueForKey(m, "uniqueItems") + if v15 != nil { + x.UniqueItems, ok = v15.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 16; + v16 := compiler.MapValueForKey(m, "enum") + if v16 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v16.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 17; + v17 := compiler.MapValueForKey(m, "multipleOf") + if v17 != nil { + switch v17 := v17.(type) { + case float64: + x.MultipleOf = v17 + case float32: + x.MultipleOf = float64(v17) + case uint64: + x.MultipleOf = float64(v17) + case uint32: + 
x.MultipleOf = float64(v17) + case int64: + x.MultipleOf = float64(v17) + case int32: + x.MultipleOf = float64(v17) + case int: + x.MultipleOf = float64(v17) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 18; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewProperties creates an object of type Properties if possible, returning an error if not. +func NewProperties(in interface{}, context *compiler.Context) (*Properties, error) { + errors := make([]error, 0) + x := &Properties{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewQueryParameterSubSchema creates an object of type QueryParameterSubSchema if possible, returning an error if not. 
+func NewQueryParameterSubSchema(in interface{}, context *compiler.Context) (*QueryParameterSubSchema, error) { + errors := make([]error, 0) + x := &QueryParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [query] + if ok && !compiler.StringArrayContainsValue([]string{"query"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 5; + v5 := compiler.MapValueForKey(m, "allowEmptyValue") + if v5 != nil { + x.AllowEmptyValue, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := compiler.MapValueForKey(m, "format") + if v7 != nil { + x.Format, ok = v7.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + 
} + // PrimitivesItems items = 8; + v8 := compiler.MapValueForKey(m, "items") + if v8 != nil { + var err error + x.Items, err = NewPrimitivesItems(v8, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 9; + v9 := compiler.MapValueForKey(m, "collectionFormat") + if v9 != nil { + x.CollectionFormat, ok = v9.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes multi] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes", "multi"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 10; + v10 := compiler.MapValueForKey(m, "default") + if v10 != nil { + var err error + x.Default, err = NewAny(v10, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + switch v11 := v11.(type) { + case float64: + x.Maximum = v11 + case float32: + x.Maximum = float64(v11) + case uint64: + x.Maximum = float64(v11) + case uint32: + x.Maximum = float64(v11) + case int64: + x.Maximum = float64(v11) + case int32: + x.Maximum = float64(v11) + case int: + x.Maximum = float64(v11) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = v12.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + switch v13 := v13.(type) { + case float64: + x.Minimum = v13 + case float32: + x.Minimum = float64(v13) + case uint64: + x.Minimum = float64(v13) + case uint32: + x.Minimum = float64(v13) + case int64: + x.Minimum = float64(v13) + case int32: + x.Minimum = float64(v13) + case int: + x.Minimum = float64(v13) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = v14.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := v15.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := v16.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; 
+ v17 := compiler.MapValueForKey(m, "pattern") + if v17 != nil { + x.Pattern, ok = v17.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := v18.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := v19.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = v20.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v20, v20) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 21; + v21 := compiler.MapValueForKey(m, "enum") + if v21 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v21.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 22; + v22 := compiler.MapValueForKey(m, "multipleOf") + if v22 != nil { + switch v22 := v22.(type) { + case float64: + x.MultipleOf = v22 + case float32: + x.MultipleOf = float64(v22) + case uint64: + x.MultipleOf = float64(v22) + case uint32: + x.MultipleOf = float64(v22) + case int64: + x.MultipleOf = float64(v22) + case int32: + x.MultipleOf = float64(v22) + case int: + x.MultipleOf = float64(v22) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v22, v22) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 23; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponse creates an object of type Response if possible, returning an error if not. 
+func NewResponse(in interface{}, context *compiler.Context) (*Response, error) { + errors := make([]error, 0) + x := &Response{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"description"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "examples", "headers", "schema"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaItem schema = 2; + v2 := compiler.MapValueForKey(m, "schema") + if v2 != nil { + var err error + x.Schema, err = NewSchemaItem(v2, compiler.NewContext("schema", context)) + if err != nil { + errors = append(errors, err) + } + } + // Headers headers = 3; + v3 := compiler.MapValueForKey(m, "headers") + if v3 != nil { + var err error + x.Headers, err = NewHeaders(v3, compiler.NewContext("headers", context)) + if err != nil { + errors = append(errors, err) + } + } + // Examples examples = 4; + v4 := compiler.MapValueForKey(m, "examples") + if v4 != nil { + var err error + x.Examples, err = NewExamples(v4, compiler.NewContext("examples", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 5; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseDefinitions creates an object of type ResponseDefinitions if possible, returning an error if not. 
+func NewResponseDefinitions(in interface{}, context *compiler.Context) (*ResponseDefinitions, error) { + errors := make([]error, 0) + x := &ResponseDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedResponse additional_properties = 1; + // MAP: Response + x.AdditionalProperties = make([]*NamedResponse, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedResponse{} + pair.Name = k + var err error + pair.Value, err = NewResponse(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseValue creates an object of type ResponseValue if possible, returning an error if not. +func NewResponseValue(in interface{}, context *compiler.Context) (*ResponseValue, error) { + errors := make([]error, 0) + x := &ResponseValue{} + matched := false + // Response response = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewResponse(m, compiler.NewContext("response", context)) + if matchingError == nil { + x.Oneof = &ResponseValue_Response{Response: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // JsonReference json_reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewJsonReference(m, compiler.NewContext("jsonReference", context)) + if matchingError == nil { + x.Oneof = &ResponseValue_JsonReference{JsonReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponses creates an object of type Responses if possible, returning an error if not. 
+func NewResponses(in interface{}, context *compiler.Context) (*Responses, error) { + errors := make([]error, 0) + x := &Responses{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern2, pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedResponseValue response_code = 1; + // MAP: ResponseValue ^([0-9]{3})$|^(default)$ + x.ResponseCode = make([]*NamedResponseValue, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if pattern2.MatchString(k) { + pair := &NamedResponseValue{} + pair.Name = k + var err error + pair.Value, err = NewResponseValue(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.ResponseCode = append(x.ResponseCode, pair) + } + } + } + // repeated NamedAny vendor_extension = 2; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchema creates an object of type Schema if possible, returning an error if not. 
+func NewSchema(in interface{}, context *compiler.Context) (*Schema, error) { + errors := make([]error, 0) + x := &Schema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "additionalProperties", "allOf", "default", "description", "discriminator", "enum", "example", "exclusiveMaximum", "exclusiveMinimum", "externalDocs", "format", "items", "maxItems", "maxLength", "maxProperties", "maximum", "minItems", "minLength", "minProperties", "minimum", "multipleOf", "pattern", "properties", "readOnly", "required", "title", "type", "uniqueItems", "xml"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 3; + v3 := compiler.MapValueForKey(m, "title") + if v3 != nil { + x.Title, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // float multiple_of = 6; + v6 := compiler.MapValueForKey(m, "multipleOf") + if v6 != nil { + switch v6 := v6.(type) { + case float64: + x.MultipleOf = v6 + case float32: + x.MultipleOf = float64(v6) + case uint64: + x.MultipleOf = float64(v6) + case uint32: + x.MultipleOf = float64(v6) + case int64: + x.MultipleOf = float64(v6) + case int32: + x.MultipleOf = float64(v6) + case int: + x.MultipleOf = float64(v6) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float maximum = 7; + v7 := compiler.MapValueForKey(m, "maximum") + if v7 != nil { + switch v7 := v7.(type) { + case float64: + x.Maximum = v7 + case float32: + x.Maximum = float64(v7) + case uint64: + x.Maximum = float64(v7) + case uint32: + x.Maximum = float64(v7) + case int64: + x.Maximum = float64(v7) + case int32: + x.Maximum = float64(v7) + case int: + x.Maximum = float64(v7) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool 
exclusive_maximum = 8; + v8 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v8 != nil { + x.ExclusiveMaximum, ok = v8.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 9; + v9 := compiler.MapValueForKey(m, "minimum") + if v9 != nil { + switch v9 := v9.(type) { + case float64: + x.Minimum = v9 + case float32: + x.Minimum = float64(v9) + case uint64: + x.Minimum = float64(v9) + case uint32: + x.Minimum = float64(v9) + case int64: + x.Minimum = float64(v9) + case int32: + x.Minimum = float64(v9) + case int: + x.Minimum = float64(v9) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 10; + v10 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v10 != nil { + x.ExclusiveMinimum, ok = v10.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 11; + v11 := compiler.MapValueForKey(m, "maxLength") + if v11 != nil { + t, ok := v11.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 12; + v12 := compiler.MapValueForKey(m, "minLength") + if v12 != nil { + t, ok := v12.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 13; + v13 := compiler.MapValueForKey(m, "pattern") + if v13 != nil { + x.Pattern, ok = v13.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 14; + v14 := compiler.MapValueForKey(m, "maxItems") + if v14 != nil { + t, ok := v14.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 15; + v15 := compiler.MapValueForKey(m, "minItems") + if v15 != nil { + t, ok := v15.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 16; + v16 := compiler.MapValueForKey(m, "uniqueItems") + if v16 != nil { + x.UniqueItems, ok = v16.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_properties = 17; + v17 := compiler.MapValueForKey(m, "maxProperties") + if v17 != nil { + t, ok := v17.(int) + if ok { + x.MaxProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxProperties: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_properties = 18; + v18 := compiler.MapValueForKey(m, "minProperties") + if v18 != nil { + t, ok := v18.(int) + if ok { + x.MinProperties = int64(t) + } else { + message := fmt.Sprintf("has 
unexpected value for minProperties: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string required = 19; + v19 := compiler.MapValueForKey(m, "required") + if v19 != nil { + v, ok := v19.([]interface{}) + if ok { + x.Required = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v20.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // AdditionalPropertiesItem additional_properties = 21; + v21 := compiler.MapValueForKey(m, "additionalProperties") + if v21 != nil { + var err error + x.AdditionalProperties, err = NewAdditionalPropertiesItem(v21, compiler.NewContext("additionalProperties", context)) + if err != nil { + errors = append(errors, err) + } + } + // TypeItem type = 22; + v22 := compiler.MapValueForKey(m, "type") + if v22 != nil { + var err error + x.Type, err = NewTypeItem(v22, compiler.NewContext("type", context)) + if err != nil { + errors = append(errors, err) + } + } + // ItemsItem items = 23; + v23 := compiler.MapValueForKey(m, "items") + if v23 != nil { + var err error + x.Items, err = NewItemsItem(v23, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Schema all_of = 24; + v24 := compiler.MapValueForKey(m, "allOf") + if v24 != nil { + // repeated Schema + x.AllOf = make([]*Schema, 0) + a, ok := v24.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSchema(item, compiler.NewContext("allOf", context)) + if err != nil { + errors = append(errors, err) + } + x.AllOf = append(x.AllOf, y) + } + } + } + // Properties properties = 25; + v25 := compiler.MapValueForKey(m, "properties") + if v25 != nil { + var err error + x.Properties, err = NewProperties(v25, compiler.NewContext("properties", context)) + if err != nil { + errors = append(errors, err) + } + } + // string discriminator = 26; + v26 := compiler.MapValueForKey(m, "discriminator") + if v26 != nil { + x.Discriminator, ok = v26.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for discriminator: %+v (%T)", v26, v26) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool read_only = 27; + v27 := compiler.MapValueForKey(m, "readOnly") + if v27 != nil { + x.ReadOnly, ok = v27.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %+v (%T)", v27, v27) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Xml xml = 28; + v28 := compiler.MapValueForKey(m, "xml") + if v28 != nil { + var err error + x.Xml, err = NewXml(v28, compiler.NewContext("xml", context)) + if err != nil { + errors = append(errors, err) + } + } + // ExternalDocs external_docs = 29; + v29 := compiler.MapValueForKey(m, "externalDocs") + if v29 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v29, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 30; + v30 := compiler.MapValueForKey(m, "example") + if v30 != nil { + var err error + x.Example, err = NewAny(v30, compiler.NewContext("example", context)) + 
if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 31; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemaItem creates an object of type SchemaItem if possible, returning an error if not. +func NewSchemaItem(in interface{}, context *compiler.Context) (*SchemaItem, error) { + errors := make([]error, 0) + x := &SchemaItem{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", context)) + if matchingError == nil { + x.Oneof = &SchemaItem_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // FileSchema file_schema = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewFileSchema(m, compiler.NewContext("fileSchema", context)) + if matchingError == nil { + x.Oneof = &SchemaItem_FileSchema{FileSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityDefinitions creates an object of type SecurityDefinitions if possible, returning an error if not. +func NewSecurityDefinitions(in interface{}, context *compiler.Context) (*SecurityDefinitions, error) { + errors := make([]error, 0) + x := &SecurityDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSecurityDefinitionsItem additional_properties = 1; + // MAP: SecurityDefinitionsItem + x.AdditionalProperties = make([]*NamedSecurityDefinitionsItem, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSecurityDefinitionsItem{} + pair.Name = k + var err error + pair.Value, err = NewSecurityDefinitionsItem(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityDefinitionsItem creates an object of type SecurityDefinitionsItem if possible, returning an error if not. 
+func NewSecurityDefinitionsItem(in interface{}, context *compiler.Context) (*SecurityDefinitionsItem, error) { + errors := make([]error, 0) + x := &SecurityDefinitionsItem{} + matched := false + // BasicAuthenticationSecurity basic_authentication_security = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewBasicAuthenticationSecurity(m, compiler.NewContext("basicAuthenticationSecurity", context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_BasicAuthenticationSecurity{BasicAuthenticationSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // ApiKeySecurity api_key_security = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewApiKeySecurity(m, compiler.NewContext("apiKeySecurity", context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_ApiKeySecurity{ApiKeySecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2ImplicitSecurity oauth2_implicit_security = 3; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2ImplicitSecurity(m, compiler.NewContext("oauth2ImplicitSecurity", context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2ImplicitSecurity{Oauth2ImplicitSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2PasswordSecurity oauth2_password_security = 4; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2PasswordSecurity(m, compiler.NewContext("oauth2PasswordSecurity", context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2PasswordSecurity{Oauth2PasswordSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2ApplicationSecurity oauth2_application_security = 5; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2ApplicationSecurity(m, compiler.NewContext("oauth2ApplicationSecurity", context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2ApplicationSecurity{Oauth2ApplicationSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2AccessCodeSecurity oauth2_access_code_security = 6; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2AccessCodeSecurity(m, compiler.NewContext("oauth2AccessCodeSecurity", context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2AccessCodeSecurity{Oauth2AccessCodeSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityRequirement creates an object of type SecurityRequirement if possible, returning an error if not. 
+func NewSecurityRequirement(in interface{}, context *compiler.Context) (*SecurityRequirement, error) { + errors := make([]error, 0) + x := &SecurityRequirement{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedStringArray additional_properties = 1; + // MAP: StringArray + x.AdditionalProperties = make([]*NamedStringArray, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedStringArray{} + pair.Name = k + var err error + pair.Value, err = NewStringArray(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStringArray creates an object of type StringArray if possible, returning an error if not. +func NewStringArray(in interface{}, context *compiler.Context) (*StringArray, error) { + errors := make([]error, 0) + x := &StringArray{} + a, ok := in.([]interface{}) + if !ok { + message := fmt.Sprintf("has unexpected value for StringArray: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.Value = make([]string, 0) + for _, s := range a { + x.Value = append(x.Value, s.(string)) + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTag creates an object of type Tag if possible, returning an error if not. +func NewTag(in interface{}, context *compiler.Context) (*Tag, error) { + errors := make([]error, 0) + x := &Tag{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "externalDocs", "name"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 3; + v3 := compiler.MapValueForKey(m, "externalDocs") + if v3 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v3, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 4; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := 
range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTypeItem creates an object of type TypeItem if possible, returning an error if not. +func NewTypeItem(in interface{}, context *compiler.Context) (*TypeItem, error) { + errors := make([]error, 0) + x := &TypeItem{} + switch in := in.(type) { + case string: + x.Value = make([]string, 0) + x.Value = append(x.Value, in) + case []interface{}: + x.Value = make([]string, 0) + for _, v := range in { + value, ok := v.(string) + if ok { + x.Value = append(x.Value, value) + } else { + message := fmt.Sprintf("has unexpected value for string array element: %+v (%T)", value, value) + errors = append(errors, compiler.NewError(context, message)) + } + } + default: + message := fmt.Sprintf("has unexpected value for string array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewVendorExtension creates an object of type VendorExtension if possible, returning an error if not. +func NewVendorExtension(in interface{}, context *compiler.Context) (*VendorExtension, error) { + errors := make([]error, 0) + x := &VendorExtension{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewXml creates an object of type Xml if possible, returning an error if not. 
+func NewXml(in interface{}, context *compiler.Context) (*Xml, error) { + errors := make([]error, 0) + x := &Xml{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"attribute", "name", "namespace", "prefix", "wrapped"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string namespace = 2; + v2 := compiler.MapValueForKey(m, "namespace") + if v2 != nil { + x.Namespace, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for namespace: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string prefix = 3; + v3 := compiler.MapValueForKey(m, "prefix") + if v3 != nil { + x.Prefix, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for prefix: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool attribute = 4; + v4 := compiler.MapValueForKey(m, "attribute") + if v4 != nil { + x.Attribute, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for attribute: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool wrapped = 5; + v5 := compiler.MapValueForKey(m, "wrapped") + if v5 != nil { + x.Wrapped, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for wrapped: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside AdditionalPropertiesItem objects. +func (m *AdditionalPropertiesItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*AdditionalPropertiesItem_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Any objects. 
+func (m *Any) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ApiKeySecurity objects. +func (m *ApiKeySecurity) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside BasicAuthenticationSecurity objects. +func (m *BasicAuthenticationSecurity) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside BodyParameter objects. +func (m *BodyParameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Contact objects. +func (m *Contact) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Default objects. +func (m *Default) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Definitions objects. +func (m *Definitions) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Document objects. 
+func (m *Document) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Info != nil { + _, err := m.Info.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Paths != nil { + _, err := m.Paths.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Definitions != nil { + _, err := m.Definitions.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.SecurityDefinitions != nil { + _, err := m.SecurityDefinitions.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Tags { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Examples objects. +func (m *Examples) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExternalDocs objects. +func (m *ExternalDocs) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside FileSchema objects. +func (m *FileSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside FormDataParameterSubSchema objects. 
+func (m *FormDataParameterSubSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Header objects. +func (m *Header) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeaderParameterSubSchema objects. +func (m *HeaderParameterSubSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Headers objects. +func (m *Headers) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Info objects. +func (m *Info) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Contact != nil { + _, err := m.Contact.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.License != nil { + _, err := m.License.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ItemsItem objects. 
+func (m *ItemsItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.Schema { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside JsonReference objects. +func (m *JsonReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewJsonReference(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside License objects. +func (m *License) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedAny objects. +func (m *NamedAny) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedHeader objects. +func (m *NamedHeader) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedParameter objects. +func (m *NamedParameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedPathItem objects. +func (m *NamedPathItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResponse objects. +func (m *NamedResponse) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResponseValue objects. +func (m *NamedResponseValue) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSchema objects. 
+func (m *NamedSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSecurityDefinitionsItem objects. +func (m *NamedSecurityDefinitionsItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedString objects. +func (m *NamedString) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedStringArray objects. +func (m *NamedStringArray) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NonBodyParameter objects. +func (m *NonBodyParameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*NonBodyParameter_HeaderParameterSubSchema) + if ok { + _, err := p.HeaderParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*NonBodyParameter_FormDataParameterSubSchema) + if ok { + _, err := p.FormDataParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*NonBodyParameter_QueryParameterSubSchema) + if ok { + _, err := p.QueryParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*NonBodyParameter_PathParameterSubSchema) + if ok { + _, err := p.PathParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2AccessCodeSecurity objects. +func (m *Oauth2AccessCodeSecurity) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2ApplicationSecurity objects. +func (m *Oauth2ApplicationSecurity) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2ImplicitSecurity objects. 
+func (m *Oauth2ImplicitSecurity) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2PasswordSecurity objects. +func (m *Oauth2PasswordSecurity) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2Scopes objects. +func (m *Oauth2Scopes) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Operation objects. +func (m *Operation) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameter objects. +func (m *Parameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*Parameter_BodyParameter) + if ok { + _, err := p.BodyParameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*Parameter_NonBodyParameter) + if ok { + _, err := p.NonBodyParameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParameterDefinitions objects. +func (m *ParameterDefinitions) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParametersItem objects. 
+func (m *ParametersItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ParametersItem_Parameter) + if ok { + _, err := p.Parameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ParametersItem_JsonReference) + if ok { + info, err := p.JsonReference.ResolveReferences(root) + if err != nil { + return nil, err + } else if info != nil { + n, err := NewParametersItem(info, nil) + if err != nil { + return nil, err + } else if n != nil { + *m = *n + return nil, nil + } + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathItem objects. +func (m *PathItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewPathItem(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Get != nil { + _, err := m.Get.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Put != nil { + _, err := m.Put.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Post != nil { + _, err := m.Post.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Delete != nil { + _, err := m.Delete.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Options != nil { + _, err := m.Options.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Head != nil { + _, err := m.Head.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Patch != nil { + _, err := m.Patch.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathParameterSubSchema objects. +func (m *PathParameterSubSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Paths objects. 
+func (m *Paths) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Path { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PrimitivesItems objects. +func (m *PrimitivesItems) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Properties objects. +func (m *Properties) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside QueryParameterSubSchema objects. +func (m *QueryParameterSubSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Response objects. +func (m *Response) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseDefinitions objects. 
+func (m *ResponseDefinitions) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseValue objects. +func (m *ResponseValue) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ResponseValue_Response) + if ok { + _, err := p.Response.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ResponseValue_JsonReference) + if ok { + info, err := p.JsonReference.ResolveReferences(root) + if err != nil { + return nil, err + } else if info != nil { + n, err := NewResponseValue(info, nil) + if err != nil { + return nil, err + } else if n != nil { + *m = *n + return nil, nil + } + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Responses objects. +func (m *Responses) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.ResponseCode { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schema objects. +func (m *Schema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewSchema(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Type != nil { + _, err := m.Type.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.AllOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Xml != nil { + _, err := m.Xml.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = 
append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemaItem objects. +func (m *SchemaItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SchemaItem_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SchemaItem_FileSchema) + if ok { + _, err := p.FileSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityDefinitions objects. +func (m *SecurityDefinitions) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityDefinitionsItem objects. +func (m *SecurityDefinitionsItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_BasicAuthenticationSecurity) + if ok { + _, err := p.BasicAuthenticationSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_ApiKeySecurity) + if ok { + _, err := p.ApiKeySecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ImplicitSecurity) + if ok { + _, err := p.Oauth2ImplicitSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2PasswordSecurity) + if ok { + _, err := p.Oauth2PasswordSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ApplicationSecurity) + if ok { + _, err := p.Oauth2ApplicationSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity) + if ok { + _, err := p.Oauth2AccessCodeSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityRequirement objects. +func (m *SecurityRequirement) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside StringArray objects. +func (m *StringArray) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Tag objects. 
+func (m *Tag) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside TypeItem objects. +func (m *TypeItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside VendorExtension objects. +func (m *VendorExtension) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Xml objects. +func (m *Xml) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. +func (m *AdditionalPropertiesItem) ToRawInfo() interface{} { + // ONE OF WRAPPER + // AdditionalPropertiesItem + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return v1.Boolean + } + return nil +} + +// ToRawInfo returns a description of Any suitable for JSON or YAML export. +func (m *Any) ToRawInfo() interface{} { + var err error + var info1 []yaml.MapSlice + err = yaml.Unmarshal([]byte(m.Yaml), &info1) + if err == nil { + return info1 + } + var info2 yaml.MapSlice + err = yaml.Unmarshal([]byte(m.Yaml), &info2) + if err == nil { + return info2 + } + var info3 interface{} + err = yaml.Unmarshal([]byte(m.Yaml), &info3) + if err == nil { + return info3 + } + return nil +} + +// ToRawInfo returns a description of ApiKeySecurity suitable for JSON or YAML export. +func (m *ApiKeySecurity) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of BasicAuthenticationSecurity suitable for JSON or YAML export. 
+func (m *BasicAuthenticationSecurity) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of BodyParameter suitable for JSON or YAML export. +func (m *BodyParameter) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.Schema != nil { + info = append(info, yaml.MapItem{"schema", m.Schema.ToRawInfo()}) + } + // &{Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Contact suitable for JSON or YAML export. +func (m *Contact) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Url != "" { + info = append(info, yaml.MapItem{"url", m.Url}) + } + if m.Email != "" { + info = append(info, yaml.MapItem{"email", m.Email}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Default suitable for JSON or YAML export. +func (m *Default) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Definitions suitable for JSON or YAML export. +func (m *Definitions) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSchema StringEnumValues:[] MapType:Schema Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Document suitable for JSON or YAML export. 
+func (m *Document) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Swagger != "" { + info = append(info, yaml.MapItem{"swagger", m.Swagger}) + } + if m.Info != nil { + info = append(info, yaml.MapItem{"info", m.Info.ToRawInfo()}) + } + // &{Name:info Type:Info StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Host != "" { + info = append(info, yaml.MapItem{"host", m.Host}) + } + if m.BasePath != "" { + info = append(info, yaml.MapItem{"basePath", m.BasePath}) + } + if len(m.Schemes) != 0 { + info = append(info, yaml.MapItem{"schemes", m.Schemes}) + } + if len(m.Consumes) != 0 { + info = append(info, yaml.MapItem{"consumes", m.Consumes}) + } + if len(m.Produces) != 0 { + info = append(info, yaml.MapItem{"produces", m.Produces}) + } + if m.Paths != nil { + info = append(info, yaml.MapItem{"paths", m.Paths.ToRawInfo()}) + } + // &{Name:paths Type:Paths StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Definitions != nil { + info = append(info, yaml.MapItem{"definitions", m.Definitions.ToRawInfo()}) + } + // &{Name:definitions Type:Definitions StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Parameters != nil { + info = append(info, yaml.MapItem{"parameters", m.Parameters.ToRawInfo()}) + } + // &{Name:parameters Type:ParameterDefinitions StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Responses != nil { + info = append(info, yaml.MapItem{"responses", m.Responses.ToRawInfo()}) + } + // &{Name:responses Type:ResponseDefinitions StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Security) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Security { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"security", items}) + } + // &{Name:security Type:SecurityRequirement StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.SecurityDefinitions != nil { + info = append(info, yaml.MapItem{"securityDefinitions", m.SecurityDefinitions.ToRawInfo()}) + } + // &{Name:securityDefinitions Type:SecurityDefinitions StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Tags) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Tags { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"tags", items}) + } + // &{Name:tags Type:Tag StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Examples suitable for JSON or YAML export. 
+func (m *Examples) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export. +func (m *ExternalDocs) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Url != "" { + info = append(info, yaml.MapItem{"url", m.Url}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of FileSchema suitable for JSON or YAML export. +func (m *FileSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Title != "" { + info = append(info, yaml.MapItem{"title", m.Title}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Required) != 0 { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.ReadOnly != false { + info = append(info, yaml.MapItem{"readOnly", m.ReadOnly}) + } + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Example != nil { + info = append(info, yaml.MapItem{"example", m.Example.ToRawInfo()}) + } + // &{Name:example Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of FormDataParameterSubSchema suitable for JSON or YAML export. 
+func (m *FormDataParameterSubSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.AllowEmptyValue != false { + info = append(info, yaml.MapItem{"allowEmptyValue", m.AllowEmptyValue}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:PrimitivesItems StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.CollectionFormat != "" { + info = append(info, yaml.MapItem{"collectionFormat", m.CollectionFormat}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Header suitable for JSON or YAML export. 
+func (m *Header) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:PrimitivesItems StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.CollectionFormat != "" { + info = append(info, yaml.MapItem{"collectionFormat", m.CollectionFormat}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of HeaderParameterSubSchema suitable for JSON or YAML export. 
+func (m *HeaderParameterSubSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:PrimitivesItems StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.CollectionFormat != "" { + info = append(info, yaml.MapItem{"collectionFormat", m.CollectionFormat}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Headers suitable for JSON or YAML export. +func (m *Headers) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedHeader StringEnumValues:[] MapType:Header Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Info suitable for JSON or YAML export. 
+func (m *Info) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Title != "" { + info = append(info, yaml.MapItem{"title", m.Title}) + } + if m.Version != "" { + info = append(info, yaml.MapItem{"version", m.Version}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.TermsOfService != "" { + info = append(info, yaml.MapItem{"termsOfService", m.TermsOfService}) + } + if m.Contact != nil { + info = append(info, yaml.MapItem{"contact", m.Contact.ToRawInfo()}) + } + // &{Name:contact Type:Contact StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.License != nil { + info = append(info, yaml.MapItem{"license", m.License.ToRawInfo()}) + } + // &{Name:license Type:License StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export. +func (m *ItemsItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Schema) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Schema { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"schema", items}) + } + // &{Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of JsonReference suitable for JSON or YAML export. +func (m *JsonReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + return info +} + +// ToRawInfo returns a description of License suitable for JSON or YAML export. +func (m *License) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Url != "" { + info = append(info, yaml.MapItem{"url", m.Url}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. +func (m *NamedAny) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedHeader suitable for JSON or YAML export. +func (m *NamedHeader) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedParameter suitable for JSON or YAML export. 
+func (m *NamedParameter) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. +func (m *NamedPathItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponse suitable for JSON or YAML export. +func (m *NamedResponse) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponseValue suitable for JSON or YAML export. +func (m *NamedResponseValue) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:ResponseValue StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSchema suitable for JSON or YAML export. +func (m *NamedSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSecurityDefinitionsItem suitable for JSON or YAML export. +func (m *NamedSecurityDefinitionsItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:SecurityDefinitionsItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. +func (m *NamedString) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Value != "" { + info = append(info, yaml.MapItem{"value", m.Value}) + } + return info +} + +// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export. +func (m *NamedStringArray) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:StringArray StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NonBodyParameter suitable for JSON or YAML export. 
+func (m *NonBodyParameter) ToRawInfo() interface{} { + // ONE OF WRAPPER + // NonBodyParameter + // {Name:headerParameterSubSchema Type:HeaderParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetHeaderParameterSubSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:formDataParameterSubSchema Type:FormDataParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetFormDataParameterSubSchema() + if v1 != nil { + return v1.ToRawInfo() + } + // {Name:queryParameterSubSchema Type:QueryParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v2 := m.GetQueryParameterSubSchema() + if v2 != nil { + return v2.ToRawInfo() + } + // {Name:pathParameterSubSchema Type:PathParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v3 := m.GetPathParameterSubSchema() + if v3 != nil { + return v3.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of Oauth2AccessCodeSecurity suitable for JSON or YAML export. +func (m *Oauth2AccessCodeSecurity) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Flow != "" { + info = append(info, yaml.MapItem{"flow", m.Flow}) + } + if m.Scopes != nil { + info = append(info, yaml.MapItem{"scopes", m.Scopes.ToRawInfo()}) + } + // &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.AuthorizationUrl != "" { + info = append(info, yaml.MapItem{"authorizationUrl", m.AuthorizationUrl}) + } + if m.TokenUrl != "" { + info = append(info, yaml.MapItem{"tokenUrl", m.TokenUrl}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Oauth2ApplicationSecurity suitable for JSON or YAML export. +func (m *Oauth2ApplicationSecurity) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Flow != "" { + info = append(info, yaml.MapItem{"flow", m.Flow}) + } + if m.Scopes != nil { + info = append(info, yaml.MapItem{"scopes", m.Scopes.ToRawInfo()}) + } + // &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.TokenUrl != "" { + info = append(info, yaml.MapItem{"tokenUrl", m.TokenUrl}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Oauth2ImplicitSecurity suitable for JSON or YAML export. 
+func (m *Oauth2ImplicitSecurity) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Flow != "" { + info = append(info, yaml.MapItem{"flow", m.Flow}) + } + if m.Scopes != nil { + info = append(info, yaml.MapItem{"scopes", m.Scopes.ToRawInfo()}) + } + // &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.AuthorizationUrl != "" { + info = append(info, yaml.MapItem{"authorizationUrl", m.AuthorizationUrl}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Oauth2PasswordSecurity suitable for JSON or YAML export. +func (m *Oauth2PasswordSecurity) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Flow != "" { + info = append(info, yaml.MapItem{"flow", m.Flow}) + } + if m.Scopes != nil { + info = append(info, yaml.MapItem{"scopes", m.Scopes.ToRawInfo()}) + } + // &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.TokenUrl != "" { + info = append(info, yaml.MapItem{"tokenUrl", m.TokenUrl}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Oauth2Scopes suitable for JSON or YAML export. +func (m *Oauth2Scopes) ToRawInfo() interface{} { + info := yaml.MapSlice{} + // &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
+func (m *Operation) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Tags) != 0 { + info = append(info, yaml.MapItem{"tags", m.Tags}) + } + if m.Summary != "" { + info = append(info, yaml.MapItem{"summary", m.Summary}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.OperationId != "" { + info = append(info, yaml.MapItem{"operationId", m.OperationId}) + } + if len(m.Produces) != 0 { + info = append(info, yaml.MapItem{"produces", m.Produces}) + } + if len(m.Consumes) != 0 { + info = append(info, yaml.MapItem{"consumes", m.Consumes}) + } + if len(m.Parameters) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Parameters { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"parameters", items}) + } + // &{Name:parameters Type:ParametersItem StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:The parameters needed to send a valid API call.} + if m.Responses != nil { + info = append(info, yaml.MapItem{"responses", m.Responses.ToRawInfo()}) + } + // &{Name:responses Type:Responses StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Schemes) != 0 { + info = append(info, yaml.MapItem{"schemes", m.Schemes}) + } + if m.Deprecated != false { + info = append(info, yaml.MapItem{"deprecated", m.Deprecated}) + } + if len(m.Security) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Security { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"security", items}) + } + // &{Name:security Type:SecurityRequirement StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. +func (m *Parameter) ToRawInfo() interface{} { + // ONE OF WRAPPER + // Parameter + // {Name:bodyParameter Type:BodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetBodyParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:nonBodyParameter Type:NonBodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetNonBodyParameter() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of ParameterDefinitions suitable for JSON or YAML export. +func (m *ParameterDefinitions) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedParameter StringEnumValues:[] MapType:Parameter Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ParametersItem suitable for JSON or YAML export. 
+func (m *ParametersItem) ToRawInfo() interface{} { + // ONE OF WRAPPER + // ParametersItem + // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetJsonReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. +func (m *PathItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.Get != nil { + info = append(info, yaml.MapItem{"get", m.Get.ToRawInfo()}) + } + // &{Name:get Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Put != nil { + info = append(info, yaml.MapItem{"put", m.Put.ToRawInfo()}) + } + // &{Name:put Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Post != nil { + info = append(info, yaml.MapItem{"post", m.Post.ToRawInfo()}) + } + // &{Name:post Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Delete != nil { + info = append(info, yaml.MapItem{"delete", m.Delete.ToRawInfo()}) + } + // &{Name:delete Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Options != nil { + info = append(info, yaml.MapItem{"options", m.Options.ToRawInfo()}) + } + // &{Name:options Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Head != nil { + info = append(info, yaml.MapItem{"head", m.Head.ToRawInfo()}) + } + // &{Name:head Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Patch != nil { + info = append(info, yaml.MapItem{"patch", m.Patch.ToRawInfo()}) + } + // &{Name:patch Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Parameters) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Parameters { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"parameters", items}) + } + // &{Name:parameters Type:ParametersItem StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:The parameters needed to send a valid API call.} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of PathParameterSubSchema suitable for JSON or YAML export. 
+func (m *PathParameterSubSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:PrimitivesItems StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.CollectionFormat != "" { + info = append(info, yaml.MapItem{"collectionFormat", m.CollectionFormat}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Paths suitable for JSON or YAML export. +func (m *Paths) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + if m.Path != nil { + for _, item := range m.Path { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:Path Type:NamedPathItem StringEnumValues:[] MapType:PathItem Repeated:true Pattern:^/ Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of PrimitivesItems suitable for JSON or YAML export. 
+func (m *PrimitivesItems) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:PrimitivesItems StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.CollectionFormat != "" { + info = append(info, yaml.MapItem{"collectionFormat", m.CollectionFormat}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Properties suitable for JSON or YAML export. +func (m *Properties) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSchema StringEnumValues:[] MapType:Schema Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of QueryParameterSubSchema suitable for JSON or YAML export. 
+func (m *QueryParameterSubSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.AllowEmptyValue != false { + info = append(info, yaml.MapItem{"allowEmptyValue", m.AllowEmptyValue}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:PrimitivesItems StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.CollectionFormat != "" { + info = append(info, yaml.MapItem{"collectionFormat", m.CollectionFormat}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Response suitable for JSON or YAML export. 
+func (m *Response) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Schema != nil { + info = append(info, yaml.MapItem{"schema", m.Schema.ToRawInfo()}) + } + // &{Name:schema Type:SchemaItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Headers != nil { + info = append(info, yaml.MapItem{"headers", m.Headers.ToRawInfo()}) + } + // &{Name:headers Type:Headers StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Examples != nil { + info = append(info, yaml.MapItem{"examples", m.Examples.ToRawInfo()}) + } + // &{Name:examples Type:Examples StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ResponseDefinitions suitable for JSON or YAML export. +func (m *ResponseDefinitions) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedResponse StringEnumValues:[] MapType:Response Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ResponseValue suitable for JSON or YAML export. +func (m *ResponseValue) ToRawInfo() interface{} { + // ONE OF WRAPPER + // ResponseValue + // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetResponse() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetJsonReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of Responses suitable for JSON or YAML export. +func (m *Responses) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.ResponseCode != nil { + for _, item := range m.ResponseCode { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:ResponseCode Type:NamedResponseValue StringEnumValues:[] MapType:ResponseValue Repeated:true Pattern:^([0-9]{3})$|^(default)$ Implicit:true Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Schema suitable for JSON or YAML export. 
+func (m *Schema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Title != "" { + info = append(info, yaml.MapItem{"title", m.Title}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if m.MaxProperties != 0 { + info = append(info, yaml.MapItem{"maxProperties", m.MaxProperties}) + } + if m.MinProperties != 0 { + info = append(info, yaml.MapItem{"minProperties", m.MinProperties}) + } + if len(m.Required) != 0 { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.AdditionalProperties != nil { + info = append(info, yaml.MapItem{"additionalProperties", m.AdditionalProperties.ToRawInfo()}) + } + // &{Name:additionalProperties Type:AdditionalPropertiesItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Type != nil { + if len(m.Type.Value) == 1 { + info = append(info, yaml.MapItem{"type", m.Type.Value[0]}) + } else { + info = append(info, yaml.MapItem{"type", m.Type.Value}) + } + } + // &{Name:type Type:TypeItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Items != nil { + items := make([]interface{}, 0) + for _, item := range m.Items.Schema { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"items", items[0]}) + } + // &{Name:items Type:ItemsItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.AllOf) != 0 { + items := make([]interface{}, 0) + for _, item := range m.AllOf { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"allOf", items}) + } + // &{Name:allOf Type:Schema StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.Properties != nil { + info = append(info, yaml.MapItem{"properties", m.Properties.ToRawInfo()}) + } 
+ // &{Name:properties Type:Properties StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Discriminator != "" { + info = append(info, yaml.MapItem{"discriminator", m.Discriminator}) + } + if m.ReadOnly != false { + info = append(info, yaml.MapItem{"readOnly", m.ReadOnly}) + } + if m.Xml != nil { + info = append(info, yaml.MapItem{"xml", m.Xml.ToRawInfo()}) + } + // &{Name:xml Type:Xml StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Example != nil { + info = append(info, yaml.MapItem{"example", m.Example.ToRawInfo()}) + } + // &{Name:example Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of SchemaItem suitable for JSON or YAML export. +func (m *SchemaItem) ToRawInfo() interface{} { + // ONE OF WRAPPER + // SchemaItem + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:fileSchema Type:FileSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetFileSchema() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of SecurityDefinitions suitable for JSON or YAML export. +func (m *SecurityDefinitions) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSecurityDefinitionsItem StringEnumValues:[] MapType:SecurityDefinitionsItem Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of SecurityDefinitionsItem suitable for JSON or YAML export. 
+func (m *SecurityDefinitionsItem) ToRawInfo() interface{} { + // ONE OF WRAPPER + // SecurityDefinitionsItem + // {Name:basicAuthenticationSecurity Type:BasicAuthenticationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetBasicAuthenticationSecurity() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:apiKeySecurity Type:ApiKeySecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetApiKeySecurity() + if v1 != nil { + return v1.ToRawInfo() + } + // {Name:oauth2ImplicitSecurity Type:Oauth2ImplicitSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v2 := m.GetOauth2ImplicitSecurity() + if v2 != nil { + return v2.ToRawInfo() + } + // {Name:oauth2PasswordSecurity Type:Oauth2PasswordSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v3 := m.GetOauth2PasswordSecurity() + if v3 != nil { + return v3.ToRawInfo() + } + // {Name:oauth2ApplicationSecurity Type:Oauth2ApplicationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v4 := m.GetOauth2ApplicationSecurity() + if v4 != nil { + return v4.ToRawInfo() + } + // {Name:oauth2AccessCodeSecurity Type:Oauth2AccessCodeSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v5 := m.GetOauth2AccessCodeSecurity() + if v5 != nil { + return v5.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. +func (m *SecurityRequirement) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedStringArray StringEnumValues:[] MapType:StringArray Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. +func (m *StringArray) ToRawInfo() interface{} { + return m.Value +} + +// ToRawInfo returns a description of Tag suitable for JSON or YAML export. +func (m *Tag) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of TypeItem suitable for JSON or YAML export. +func (m *TypeItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Value) != 0 { + info = append(info, yaml.MapItem{"value", m.Value}) + } + return info +} + +// ToRawInfo returns a description of VendorExtension suitable for JSON or YAML export. 
+func (m *VendorExtension) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Xml suitable for JSON or YAML export. +func (m *Xml) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Namespace != "" { + info = append(info, yaml.MapItem{"namespace", m.Namespace}) + } + if m.Prefix != "" { + info = append(info, yaml.MapItem{"prefix", m.Prefix}) + } + if m.Attribute != false { + info = append(info, yaml.MapItem{"attribute", m.Attribute}) + } + if m.Wrapped != false { + info = append(info, yaml.MapItem{"wrapped", m.Wrapped}) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:VendorExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +var ( + pattern0 = regexp.MustCompile("^x-") + pattern1 = regexp.MustCompile("^/") + pattern2 = regexp.MustCompile("^([0-9]{3})$|^(default)$") +) diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.pb.go b/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.pb.go new file mode 100644 index 000000000..37da7df25 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.pb.go @@ -0,0 +1,4456 @@ +// Code generated by protoc-gen-go. +// source: OpenAPIv2/OpenAPIv2.proto +// DO NOT EDIT! + +/* +Package openapi_v2 is a generated protocol buffer package. + +It is generated from these files: + OpenAPIv2/OpenAPIv2.proto + +It has these top-level messages: + AdditionalPropertiesItem + Any + ApiKeySecurity + BasicAuthenticationSecurity + BodyParameter + Contact + Default + Definitions + Document + Examples + ExternalDocs + FileSchema + FormDataParameterSubSchema + Header + HeaderParameterSubSchema + Headers + Info + ItemsItem + JsonReference + License + NamedAny + NamedHeader + NamedParameter + NamedPathItem + NamedResponse + NamedResponseValue + NamedSchema + NamedSecurityDefinitionsItem + NamedString + NamedStringArray + NonBodyParameter + Oauth2AccessCodeSecurity + Oauth2ApplicationSecurity + Oauth2ImplicitSecurity + Oauth2PasswordSecurity + Oauth2Scopes + Operation + Parameter + ParameterDefinitions + ParametersItem + PathItem + PathParameterSubSchema + Paths + PrimitivesItems + Properties + QueryParameterSubSchema + Response + ResponseDefinitions + ResponseValue + Responses + Schema + SchemaItem + SecurityDefinitions + SecurityDefinitionsItem + SecurityRequirement + StringArray + Tag + TypeItem + VendorExtension + Xml +*/ +package openapi_v2 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
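[Editorial note, not part of the vendored diff.] The generated openapi_v2 package above defines one Go message per OpenAPI v2 (Swagger) construct, while the ToRawInfo methods added earlier in this change render each message back into an order-preserving yaml.MapSlice. The following is a minimal sketch of how those pieces might be combined to round-trip a small Info fragment to YAML; the package main wrapper, the field values, and the yaml.v2 import alias are assumptions for illustration, whereas the Info type, its fields, and ToRawInfo come from the vendored package itself.

// Illustrative sketch only: build an openapi_v2.Info from the generated structs
// and re-serialize it via ToRawInfo. Assumes the vendored packages added by this
// change are available on the import path.
package main

import (
	"fmt"
	"log"

	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2"
	yaml "gopkg.in/yaml.v2"
)

func main() {
	// Populate a small Info message directly through the generated struct fields.
	info := &openapi_v2.Info{
		Title:       "Example API",
		Version:     "1.0.0",
		Description: "Round-tripped through the generated model.",
	}

	// ToRawInfo returns a yaml.MapSlice, so field order is preserved and the
	// result can be marshaled straight back to YAML (or converted to JSON).
	raw := info.ToRawInfo()
	out, err := yaml.Marshal(raw)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(out))
	// Prints roughly:
	//   title: Example API
	//   version: 1.0.0
	//   description: Round-tripped through the generated model.
}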
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AdditionalPropertiesItem struct { + // Types that are valid to be assigned to Oneof: + // *AdditionalPropertiesItem_Schema + // *AdditionalPropertiesItem_Boolean + Oneof isAdditionalPropertiesItem_Oneof `protobuf_oneof:"oneof"` +} + +func (m *AdditionalPropertiesItem) Reset() { *m = AdditionalPropertiesItem{} } +func (m *AdditionalPropertiesItem) String() string { return proto.CompactTextString(m) } +func (*AdditionalPropertiesItem) ProtoMessage() {} +func (*AdditionalPropertiesItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +type isAdditionalPropertiesItem_Oneof interface { + isAdditionalPropertiesItem_Oneof() +} + +type AdditionalPropertiesItem_Schema struct { + Schema *Schema `protobuf:"bytes,1,opt,name=schema,oneof"` +} +type AdditionalPropertiesItem_Boolean struct { + Boolean bool `protobuf:"varint,2,opt,name=boolean,oneof"` +} + +func (*AdditionalPropertiesItem_Schema) isAdditionalPropertiesItem_Oneof() {} +func (*AdditionalPropertiesItem_Boolean) isAdditionalPropertiesItem_Oneof() {} + +func (m *AdditionalPropertiesItem) GetOneof() isAdditionalPropertiesItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *AdditionalPropertiesItem) GetSchema() *Schema { + if x, ok := m.GetOneof().(*AdditionalPropertiesItem_Schema); ok { + return x.Schema + } + return nil +} + +func (m *AdditionalPropertiesItem) GetBoolean() bool { + if x, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return x.Boolean + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AdditionalPropertiesItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdditionalPropertiesItem_OneofMarshaler, _AdditionalPropertiesItem_OneofUnmarshaler, _AdditionalPropertiesItem_OneofSizer, []interface{}{ + (*AdditionalPropertiesItem_Schema)(nil), + (*AdditionalPropertiesItem_Boolean)(nil), + } +} + +func _AdditionalPropertiesItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdditionalPropertiesItem) + // oneof + switch x := m.Oneof.(type) { + case *AdditionalPropertiesItem_Schema: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Schema); err != nil { + return err + } + case *AdditionalPropertiesItem_Boolean: + t := uint64(0) + if x.Boolean { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("AdditionalPropertiesItem.Oneof has unexpected type %T", x) + } + return nil +} + +func _AdditionalPropertiesItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdditionalPropertiesItem) + switch tag { + case 1: // oneof.schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Schema) + err := b.DecodeMessage(msg) + m.Oneof = &AdditionalPropertiesItem_Schema{msg} + return true, err + case 2: // oneof.boolean + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Oneof = &AdditionalPropertiesItem_Boolean{x != 0} + return true, err + default: + return false, nil + } +} + +func _AdditionalPropertiesItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdditionalPropertiesItem) + // oneof + switch x := m.Oneof.(type) { + case *AdditionalPropertiesItem_Schema: 
+ s := proto.Size(x.Schema) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *AdditionalPropertiesItem_Boolean: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Any struct { + Value *google_protobuf.Any `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` + Yaml string `protobuf:"bytes,2,opt,name=yaml" json:"yaml,omitempty"` +} + +func (m *Any) Reset() { *m = Any{} } +func (m *Any) String() string { return proto.CompactTextString(m) } +func (*Any) ProtoMessage() {} +func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Any) GetValue() *google_protobuf.Any { + if m != nil { + return m.Value + } + return nil +} + +func (m *Any) GetYaml() string { + if m != nil { + return m.Yaml + } + return "" +} + +type ApiKeySecurity struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + In string `protobuf:"bytes,3,opt,name=in" json:"in,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,5,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *ApiKeySecurity) Reset() { *m = ApiKeySecurity{} } +func (m *ApiKeySecurity) String() string { return proto.CompactTextString(m) } +func (*ApiKeySecurity) ProtoMessage() {} +func (*ApiKeySecurity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *ApiKeySecurity) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *ApiKeySecurity) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ApiKeySecurity) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *ApiKeySecurity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ApiKeySecurity) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type BasicAuthenticationSecurity struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,3,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *BasicAuthenticationSecurity) Reset() { *m = BasicAuthenticationSecurity{} } +func (m *BasicAuthenticationSecurity) String() string { return proto.CompactTextString(m) } +func (*BasicAuthenticationSecurity) ProtoMessage() {} +func (*BasicAuthenticationSecurity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *BasicAuthenticationSecurity) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *BasicAuthenticationSecurity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *BasicAuthenticationSecurity) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type BodyParameter struct { + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + // The name of the parameter. 
+ Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,3,opt,name=in" json:"in,omitempty"` + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,4,opt,name=required" json:"required,omitempty"` + Schema *Schema `protobuf:"bytes,5,opt,name=schema" json:"schema,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *BodyParameter) Reset() { *m = BodyParameter{} } +func (m *BodyParameter) String() string { return proto.CompactTextString(m) } +func (*BodyParameter) ProtoMessage() {} +func (*BodyParameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *BodyParameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *BodyParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BodyParameter) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *BodyParameter) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *BodyParameter) GetSchema() *Schema { + if m != nil { + return m.Schema + } + return nil +} + +func (m *BodyParameter) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +// Contact information for the owners of the API. +type Contact struct { + // The identifying name of the contact person/organization. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The URL pointing to the contact information. + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + // The email address of the contact person/organization. + Email string `protobuf:"bytes,3,opt,name=email" json:"email,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,4,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Contact) Reset() { *m = Contact{} } +func (m *Contact) String() string { return proto.CompactTextString(m) } +func (*Contact) ProtoMessage() {} +func (*Contact) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *Contact) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Contact) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Contact) GetEmail() string { + if m != nil { + return m.Email + } + return "" +} + +func (m *Contact) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Default struct { + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Default) Reset() { *m = Default{} } +func (m *Default) String() string { return proto.CompactTextString(m) } +func (*Default) ProtoMessage() {} +func (*Default) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *Default) GetAdditionalProperties() []*NamedAny { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// One or more JSON objects describing the schemas being consumed and produced by the API. 
+type Definitions struct { + AdditionalProperties []*NamedSchema `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Definitions) Reset() { *m = Definitions{} } +func (m *Definitions) String() string { return proto.CompactTextString(m) } +func (*Definitions) ProtoMessage() {} +func (*Definitions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *Definitions) GetAdditionalProperties() []*NamedSchema { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Document struct { + // The Swagger version of this document. + Swagger string `protobuf:"bytes,1,opt,name=swagger" json:"swagger,omitempty"` + Info *Info `protobuf:"bytes,2,opt,name=info" json:"info,omitempty"` + // The host (name or ip) of the API. Example: 'swagger.io' + Host string `protobuf:"bytes,3,opt,name=host" json:"host,omitempty"` + // The base path to the API. Example: '/api'. + BasePath string `protobuf:"bytes,4,opt,name=base_path,json=basePath" json:"base_path,omitempty"` + // The transfer protocol of the API. + Schemes []string `protobuf:"bytes,5,rep,name=schemes" json:"schemes,omitempty"` + // A list of MIME types accepted by the API. + Consumes []string `protobuf:"bytes,6,rep,name=consumes" json:"consumes,omitempty"` + // A list of MIME types the API can produce. + Produces []string `protobuf:"bytes,7,rep,name=produces" json:"produces,omitempty"` + Paths *Paths `protobuf:"bytes,8,opt,name=paths" json:"paths,omitempty"` + Definitions *Definitions `protobuf:"bytes,9,opt,name=definitions" json:"definitions,omitempty"` + Parameters *ParameterDefinitions `protobuf:"bytes,10,opt,name=parameters" json:"parameters,omitempty"` + Responses *ResponseDefinitions `protobuf:"bytes,11,opt,name=responses" json:"responses,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,12,rep,name=security" json:"security,omitempty"` + SecurityDefinitions *SecurityDefinitions `protobuf:"bytes,13,opt,name=security_definitions,json=securityDefinitions" json:"security_definitions,omitempty"` + Tags []*Tag `protobuf:"bytes,14,rep,name=tags" json:"tags,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,15,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,16,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *Document) GetSwagger() string { + if m != nil { + return m.Swagger + } + return "" +} + +func (m *Document) GetInfo() *Info { + if m != nil { + return m.Info + } + return nil +} + +func (m *Document) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *Document) GetBasePath() string { + if m != nil { + return m.BasePath + } + return "" +} + +func (m *Document) GetSchemes() []string { + if m != nil { + return m.Schemes + } + return nil +} + +func (m *Document) GetConsumes() []string { + if m != nil { + return m.Consumes + } + return nil +} + +func (m *Document) GetProduces() []string { + if m != nil { + return m.Produces + } + return nil +} + +func (m *Document) GetPaths() *Paths { + if m != nil { + return m.Paths + } + return nil +} + +func (m *Document) GetDefinitions() *Definitions { + if m != nil { + return m.Definitions + } 
+ return nil +} + +func (m *Document) GetParameters() *ParameterDefinitions { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Document) GetResponses() *ResponseDefinitions { + if m != nil { + return m.Responses + } + return nil +} + +func (m *Document) GetSecurity() []*SecurityRequirement { + if m != nil { + return m.Security + } + return nil +} + +func (m *Document) GetSecurityDefinitions() *SecurityDefinitions { + if m != nil { + return m.SecurityDefinitions + } + return nil +} + +func (m *Document) GetTags() []*Tag { + if m != nil { + return m.Tags + } + return nil +} + +func (m *Document) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Document) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Examples struct { + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Examples) Reset() { *m = Examples{} } +func (m *Examples) String() string { return proto.CompactTextString(m) } +func (*Examples) ProtoMessage() {} +func (*Examples) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +func (m *Examples) GetAdditionalProperties() []*NamedAny { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// information about external documentation +type ExternalDocs struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,3,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *ExternalDocs) Reset() { *m = ExternalDocs{} } +func (m *ExternalDocs) String() string { return proto.CompactTextString(m) } +func (*ExternalDocs) ProtoMessage() {} +func (*ExternalDocs) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *ExternalDocs) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ExternalDocs) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *ExternalDocs) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +// A deterministic version of a JSON Schema object. 
+type FileSchema struct { + Format string `protobuf:"bytes,1,opt,name=format" json:"format,omitempty"` + Title string `protobuf:"bytes,2,opt,name=title" json:"title,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + Default *Any `protobuf:"bytes,4,opt,name=default" json:"default,omitempty"` + Required []string `protobuf:"bytes,5,rep,name=required" json:"required,omitempty"` + Type string `protobuf:"bytes,6,opt,name=type" json:"type,omitempty"` + ReadOnly bool `protobuf:"varint,7,opt,name=read_only,json=readOnly" json:"read_only,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,8,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + Example *Any `protobuf:"bytes,9,opt,name=example" json:"example,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,10,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *FileSchema) Reset() { *m = FileSchema{} } +func (m *FileSchema) String() string { return proto.CompactTextString(m) } +func (*FileSchema) ProtoMessage() {} +func (*FileSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +func (m *FileSchema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *FileSchema) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *FileSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *FileSchema) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *FileSchema) GetRequired() []string { + if m != nil { + return m.Required + } + return nil +} + +func (m *FileSchema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *FileSchema) GetReadOnly() bool { + if m != nil { + return m.ReadOnly + } + return false +} + +func (m *FileSchema) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *FileSchema) GetExample() *Any { + if m != nil { + return m.Example + } + return nil +} + +func (m *FileSchema) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type FormDataParameterSubSchema struct { + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + // The name of the parameter. + Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` + // allows sending a parameter by name only or with an empty value. 
+ AllowEmptyValue bool `protobuf:"varint,5,opt,name=allow_empty_value,json=allowEmptyValue" json:"allow_empty_value,omitempty"` + Type string `protobuf:"bytes,6,opt,name=type" json:"type,omitempty"` + Format string `protobuf:"bytes,7,opt,name=format" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,8,opt,name=items" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,9,opt,name=collection_format,json=collectionFormat" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,10,opt,name=default" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,11,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,13,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,14,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,15,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,16,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,17,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,18,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,19,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,20,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,21,rep,name=enum" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,22,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,23,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *FormDataParameterSubSchema) Reset() { *m = FormDataParameterSubSchema{} } +func (m *FormDataParameterSubSchema) String() string { return proto.CompactTextString(m) } +func (*FormDataParameterSubSchema) ProtoMessage() {} +func (*FormDataParameterSubSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +func (m *FormDataParameterSubSchema) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *FormDataParameterSubSchema) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *FormDataParameterSubSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *FormDataParameterSubSchema) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FormDataParameterSubSchema) GetAllowEmptyValue() bool { + if m != nil { + return m.AllowEmptyValue + } + return false +} + +func (m *FormDataParameterSubSchema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *FormDataParameterSubSchema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *FormDataParameterSubSchema) GetItems() *PrimitivesItems { + if m != nil { + return m.Items + } + return nil +} + +func (m *FormDataParameterSubSchema) GetCollectionFormat() string { + if m != nil { + return m.CollectionFormat + } + return "" +} + +func (m *FormDataParameterSubSchema) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *FormDataParameterSubSchema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 
+} + +func (m *FormDataParameterSubSchema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *FormDataParameterSubSchema) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *FormDataParameterSubSchema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *FormDataParameterSubSchema) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *FormDataParameterSubSchema) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *FormDataParameterSubSchema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *FormDataParameterSubSchema) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *FormDataParameterSubSchema) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *FormDataParameterSubSchema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *FormDataParameterSubSchema) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *FormDataParameterSubSchema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *FormDataParameterSubSchema) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Header struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,3,opt,name=items" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,4,opt,name=collection_format,json=collectionFormat" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,5,opt,name=default" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,6,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,7,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,8,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,9,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,10,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,11,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,12,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,13,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,14,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,15,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,16,rep,name=enum" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,17,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + Description string `protobuf:"bytes,18,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,19,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Header) Reset() { *m = Header{} } +func (m *Header) String() string { return proto.CompactTextString(m) } +func (*Header) ProtoMessage() {} +func (*Header) Descriptor() 
([]byte, []int) { return fileDescriptor0, []int{13} } + +func (m *Header) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Header) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *Header) GetItems() *PrimitivesItems { + if m != nil { + return m.Items + } + return nil +} + +func (m *Header) GetCollectionFormat() string { + if m != nil { + return m.CollectionFormat + } + return "" +} + +func (m *Header) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *Header) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *Header) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *Header) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *Header) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *Header) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *Header) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *Header) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *Header) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *Header) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *Header) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *Header) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *Header) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *Header) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Header) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type HeaderParameterSubSchema struct { + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + // The name of the parameter. 
+ Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` + Type string `protobuf:"bytes,5,opt,name=type" json:"type,omitempty"` + Format string `protobuf:"bytes,6,opt,name=format" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,7,opt,name=items" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,8,opt,name=collection_format,json=collectionFormat" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,9,opt,name=default" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,10,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,11,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,12,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,13,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,14,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,15,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,16,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,17,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,18,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,19,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,20,rep,name=enum" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,21,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,22,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *HeaderParameterSubSchema) Reset() { *m = HeaderParameterSubSchema{} } +func (m *HeaderParameterSubSchema) String() string { return proto.CompactTextString(m) } +func (*HeaderParameterSubSchema) ProtoMessage() {} +func (*HeaderParameterSubSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +func (m *HeaderParameterSubSchema) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *HeaderParameterSubSchema) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *HeaderParameterSubSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *HeaderParameterSubSchema) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *HeaderParameterSubSchema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *HeaderParameterSubSchema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *HeaderParameterSubSchema) GetItems() *PrimitivesItems { + if m != nil { + return m.Items + } + return nil +} + +func (m *HeaderParameterSubSchema) GetCollectionFormat() string { + if m != nil { + return m.CollectionFormat + } + return "" +} + +func (m *HeaderParameterSubSchema) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *HeaderParameterSubSchema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *HeaderParameterSubSchema) GetMinimum() float64 { + if m != nil { + 
return m.Minimum + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *HeaderParameterSubSchema) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *HeaderParameterSubSchema) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *HeaderParameterSubSchema) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *HeaderParameterSubSchema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *HeaderParameterSubSchema) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Headers struct { + AdditionalProperties []*NamedHeader `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Headers) Reset() { *m = Headers{} } +func (m *Headers) String() string { return proto.CompactTextString(m) } +func (*Headers) ProtoMessage() {} +func (*Headers) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +func (m *Headers) GetAdditionalProperties() []*NamedHeader { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// General information about the API. +type Info struct { + // A unique and precise title of the API. + Title string `protobuf:"bytes,1,opt,name=title" json:"title,omitempty"` + // A semantic version number of the API. + Version string `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"` + // A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + // The terms of service for the API. 
+ TermsOfService string `protobuf:"bytes,4,opt,name=terms_of_service,json=termsOfService" json:"terms_of_service,omitempty"` + Contact *Contact `protobuf:"bytes,5,opt,name=contact" json:"contact,omitempty"` + License *License `protobuf:"bytes,6,opt,name=license" json:"license,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,7,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Info) Reset() { *m = Info{} } +func (m *Info) String() string { return proto.CompactTextString(m) } +func (*Info) ProtoMessage() {} +func (*Info) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +func (m *Info) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Info) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Info) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Info) GetTermsOfService() string { + if m != nil { + return m.TermsOfService + } + return "" +} + +func (m *Info) GetContact() *Contact { + if m != nil { + return m.Contact + } + return nil +} + +func (m *Info) GetLicense() *License { + if m != nil { + return m.License + } + return nil +} + +func (m *Info) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type ItemsItem struct { + Schema []*Schema `protobuf:"bytes,1,rep,name=schema" json:"schema,omitempty"` +} + +func (m *ItemsItem) Reset() { *m = ItemsItem{} } +func (m *ItemsItem) String() string { return proto.CompactTextString(m) } +func (*ItemsItem) ProtoMessage() {} +func (*ItemsItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +func (m *ItemsItem) GetSchema() []*Schema { + if m != nil { + return m.Schema + } + return nil +} + +type JsonReference struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` +} + +func (m *JsonReference) Reset() { *m = JsonReference{} } +func (m *JsonReference) String() string { return proto.CompactTextString(m) } +func (*JsonReference) ProtoMessage() {} +func (*JsonReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +func (m *JsonReference) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *JsonReference) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type License struct { + // The name of the license type. It's encouraged to use an OSI compatible license. + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The URL pointing to the license. 
+ Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,3,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *License) Reset() { *m = License{} } +func (m *License) String() string { return proto.CompactTextString(m) } +func (*License) ProtoMessage() {} +func (*License) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *License) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *License) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *License) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +type NamedAny struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Any `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedAny) Reset() { *m = NamedAny{} } +func (m *NamedAny) String() string { return proto.CompactTextString(m) } +func (*NamedAny) ProtoMessage() {} +func (*NamedAny) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *NamedAny) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedAny) GetValue() *Any { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +type NamedHeader struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Header `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedHeader) Reset() { *m = NamedHeader{} } +func (m *NamedHeader) String() string { return proto.CompactTextString(m) } +func (*NamedHeader) ProtoMessage() {} +func (*NamedHeader) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } + +func (m *NamedHeader) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedHeader) GetValue() *Header { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +type NamedParameter struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Parameter `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedParameter) Reset() { *m = NamedParameter{} } +func (m *NamedParameter) String() string { return proto.CompactTextString(m) } +func (*NamedParameter) ProtoMessage() {} +func (*NamedParameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } + +func (m *NamedParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedParameter) GetValue() *Parameter { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. 
+type NamedPathItem struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *PathItem `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedPathItem) Reset() { *m = NamedPathItem{} } +func (m *NamedPathItem) String() string { return proto.CompactTextString(m) } +func (*NamedPathItem) ProtoMessage() {} +func (*NamedPathItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } + +func (m *NamedPathItem) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedPathItem) GetValue() *PathItem { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +type NamedResponse struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Response `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedResponse) Reset() { *m = NamedResponse{} } +func (m *NamedResponse) String() string { return proto.CompactTextString(m) } +func (*NamedResponse) ProtoMessage() {} +func (*NamedResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } + +func (m *NamedResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedResponse) GetValue() *Response { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. +type NamedResponseValue struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *ResponseValue `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedResponseValue) Reset() { *m = NamedResponseValue{} } +func (m *NamedResponseValue) String() string { return proto.CompactTextString(m) } +func (*NamedResponseValue) ProtoMessage() {} +func (*NamedResponseValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } + +func (m *NamedResponseValue) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedResponseValue) GetValue() *ResponseValue { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +type NamedSchema struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Schema `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedSchema) Reset() { *m = NamedSchema{} } +func (m *NamedSchema) String() string { return proto.CompactTextString(m) } +func (*NamedSchema) ProtoMessage() {} +func (*NamedSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } + +func (m *NamedSchema) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedSchema) GetValue() *Schema { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. 
+type NamedSecurityDefinitionsItem struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *SecurityDefinitionsItem `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedSecurityDefinitionsItem) Reset() { *m = NamedSecurityDefinitionsItem{} } +func (m *NamedSecurityDefinitionsItem) String() string { return proto.CompactTextString(m) } +func (*NamedSecurityDefinitionsItem) ProtoMessage() {} +func (*NamedSecurityDefinitionsItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } + +func (m *NamedSecurityDefinitionsItem) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedSecurityDefinitionsItem) GetValue() *SecurityDefinitionsItem { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +type NamedString struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedString) Reset() { *m = NamedString{} } +func (m *NamedString) String() string { return proto.CompactTextString(m) } +func (*NamedString) ProtoMessage() {} +func (*NamedString) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } + +func (m *NamedString) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedString) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. +type NamedStringArray struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *StringArray `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedStringArray) Reset() { *m = NamedStringArray{} } +func (m *NamedStringArray) String() string { return proto.CompactTextString(m) } +func (*NamedStringArray) ProtoMessage() {} +func (*NamedStringArray) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } + +func (m *NamedStringArray) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedStringArray) GetValue() *StringArray { + if m != nil { + return m.Value + } + return nil +} + +type NonBodyParameter struct { + // Types that are valid to be assigned to Oneof: + // *NonBodyParameter_HeaderParameterSubSchema + // *NonBodyParameter_FormDataParameterSubSchema + // *NonBodyParameter_QueryParameterSubSchema + // *NonBodyParameter_PathParameterSubSchema + Oneof isNonBodyParameter_Oneof `protobuf_oneof:"oneof"` +} + +func (m *NonBodyParameter) Reset() { *m = NonBodyParameter{} } +func (m *NonBodyParameter) String() string { return proto.CompactTextString(m) } +func (*NonBodyParameter) ProtoMessage() {} +func (*NonBodyParameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } + +type isNonBodyParameter_Oneof interface { + isNonBodyParameter_Oneof() +} + +type NonBodyParameter_HeaderParameterSubSchema struct { + HeaderParameterSubSchema *HeaderParameterSubSchema `protobuf:"bytes,1,opt,name=header_parameter_sub_schema,json=headerParameterSubSchema,oneof"` +} +type NonBodyParameter_FormDataParameterSubSchema struct { + FormDataParameterSubSchema *FormDataParameterSubSchema `protobuf:"bytes,2,opt,name=form_data_parameter_sub_schema,json=formDataParameterSubSchema,oneof"` +} +type 
NonBodyParameter_QueryParameterSubSchema struct { + QueryParameterSubSchema *QueryParameterSubSchema `protobuf:"bytes,3,opt,name=query_parameter_sub_schema,json=queryParameterSubSchema,oneof"` +} +type NonBodyParameter_PathParameterSubSchema struct { + PathParameterSubSchema *PathParameterSubSchema `protobuf:"bytes,4,opt,name=path_parameter_sub_schema,json=pathParameterSubSchema,oneof"` +} + +func (*NonBodyParameter_HeaderParameterSubSchema) isNonBodyParameter_Oneof() {} +func (*NonBodyParameter_FormDataParameterSubSchema) isNonBodyParameter_Oneof() {} +func (*NonBodyParameter_QueryParameterSubSchema) isNonBodyParameter_Oneof() {} +func (*NonBodyParameter_PathParameterSubSchema) isNonBodyParameter_Oneof() {} + +func (m *NonBodyParameter) GetOneof() isNonBodyParameter_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *NonBodyParameter) GetHeaderParameterSubSchema() *HeaderParameterSubSchema { + if x, ok := m.GetOneof().(*NonBodyParameter_HeaderParameterSubSchema); ok { + return x.HeaderParameterSubSchema + } + return nil +} + +func (m *NonBodyParameter) GetFormDataParameterSubSchema() *FormDataParameterSubSchema { + if x, ok := m.GetOneof().(*NonBodyParameter_FormDataParameterSubSchema); ok { + return x.FormDataParameterSubSchema + } + return nil +} + +func (m *NonBodyParameter) GetQueryParameterSubSchema() *QueryParameterSubSchema { + if x, ok := m.GetOneof().(*NonBodyParameter_QueryParameterSubSchema); ok { + return x.QueryParameterSubSchema + } + return nil +} + +func (m *NonBodyParameter) GetPathParameterSubSchema() *PathParameterSubSchema { + if x, ok := m.GetOneof().(*NonBodyParameter_PathParameterSubSchema); ok { + return x.PathParameterSubSchema + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*NonBodyParameter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _NonBodyParameter_OneofMarshaler, _NonBodyParameter_OneofUnmarshaler, _NonBodyParameter_OneofSizer, []interface{}{ + (*NonBodyParameter_HeaderParameterSubSchema)(nil), + (*NonBodyParameter_FormDataParameterSubSchema)(nil), + (*NonBodyParameter_QueryParameterSubSchema)(nil), + (*NonBodyParameter_PathParameterSubSchema)(nil), + } +} + +func _NonBodyParameter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*NonBodyParameter) + // oneof + switch x := m.Oneof.(type) { + case *NonBodyParameter_HeaderParameterSubSchema: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HeaderParameterSubSchema); err != nil { + return err + } + case *NonBodyParameter_FormDataParameterSubSchema: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FormDataParameterSubSchema); err != nil { + return err + } + case *NonBodyParameter_QueryParameterSubSchema: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryParameterSubSchema); err != nil { + return err + } + case *NonBodyParameter_PathParameterSubSchema: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PathParameterSubSchema); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("NonBodyParameter.Oneof has unexpected type %T", x) + } + return nil +} + +func _NonBodyParameter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*NonBodyParameter) + switch tag { + case 1: // oneof.header_parameter_sub_schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HeaderParameterSubSchema) + err := b.DecodeMessage(msg) + m.Oneof = &NonBodyParameter_HeaderParameterSubSchema{msg} + return true, err + case 2: // oneof.form_data_parameter_sub_schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FormDataParameterSubSchema) + err := b.DecodeMessage(msg) + m.Oneof = &NonBodyParameter_FormDataParameterSubSchema{msg} + return true, err + case 3: // oneof.query_parameter_sub_schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryParameterSubSchema) + err := b.DecodeMessage(msg) + m.Oneof = &NonBodyParameter_QueryParameterSubSchema{msg} + return true, err + case 4: // oneof.path_parameter_sub_schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PathParameterSubSchema) + err := b.DecodeMessage(msg) + m.Oneof = &NonBodyParameter_PathParameterSubSchema{msg} + return true, err + default: + return false, nil + } +} + +func _NonBodyParameter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*NonBodyParameter) + // oneof + switch x := m.Oneof.(type) { + case *NonBodyParameter_HeaderParameterSubSchema: + s := proto.Size(x.HeaderParameterSubSchema) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *NonBodyParameter_FormDataParameterSubSchema: + s := proto.Size(x.FormDataParameterSubSchema) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *NonBodyParameter_QueryParameterSubSchema: + s := proto.Size(x.QueryParameterSubSchema) + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case 
*NonBodyParameter_PathParameterSubSchema: + s := proto.Size(x.PathParameterSubSchema) + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Oauth2AccessCodeSecurity struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Flow string `protobuf:"bytes,2,opt,name=flow" json:"flow,omitempty"` + Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes" json:"scopes,omitempty"` + AuthorizationUrl string `protobuf:"bytes,4,opt,name=authorization_url,json=authorizationUrl" json:"authorization_url,omitempty"` + TokenUrl string `protobuf:"bytes,5,opt,name=token_url,json=tokenUrl" json:"token_url,omitempty"` + Description string `protobuf:"bytes,6,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,7,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Oauth2AccessCodeSecurity) Reset() { *m = Oauth2AccessCodeSecurity{} } +func (m *Oauth2AccessCodeSecurity) String() string { return proto.CompactTextString(m) } +func (*Oauth2AccessCodeSecurity) ProtoMessage() {} +func (*Oauth2AccessCodeSecurity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{31} } + +func (m *Oauth2AccessCodeSecurity) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Oauth2AccessCodeSecurity) GetFlow() string { + if m != nil { + return m.Flow + } + return "" +} + +func (m *Oauth2AccessCodeSecurity) GetScopes() *Oauth2Scopes { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *Oauth2AccessCodeSecurity) GetAuthorizationUrl() string { + if m != nil { + return m.AuthorizationUrl + } + return "" +} + +func (m *Oauth2AccessCodeSecurity) GetTokenUrl() string { + if m != nil { + return m.TokenUrl + } + return "" +} + +func (m *Oauth2AccessCodeSecurity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Oauth2AccessCodeSecurity) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Oauth2ApplicationSecurity struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Flow string `protobuf:"bytes,2,opt,name=flow" json:"flow,omitempty"` + Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes" json:"scopes,omitempty"` + TokenUrl string `protobuf:"bytes,4,opt,name=token_url,json=tokenUrl" json:"token_url,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Oauth2ApplicationSecurity) Reset() { *m = Oauth2ApplicationSecurity{} } +func (m *Oauth2ApplicationSecurity) String() string { return proto.CompactTextString(m) } +func (*Oauth2ApplicationSecurity) ProtoMessage() {} +func (*Oauth2ApplicationSecurity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{32} } + +func (m *Oauth2ApplicationSecurity) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Oauth2ApplicationSecurity) GetFlow() string { + if m != nil { + return m.Flow + } + return "" +} + +func (m *Oauth2ApplicationSecurity) GetScopes() *Oauth2Scopes { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *Oauth2ApplicationSecurity) GetTokenUrl() string { + if m != nil { + return m.TokenUrl + } + return "" +} 
+ +func (m *Oauth2ApplicationSecurity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Oauth2ApplicationSecurity) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Oauth2ImplicitSecurity struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Flow string `protobuf:"bytes,2,opt,name=flow" json:"flow,omitempty"` + Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes" json:"scopes,omitempty"` + AuthorizationUrl string `protobuf:"bytes,4,opt,name=authorization_url,json=authorizationUrl" json:"authorization_url,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Oauth2ImplicitSecurity) Reset() { *m = Oauth2ImplicitSecurity{} } +func (m *Oauth2ImplicitSecurity) String() string { return proto.CompactTextString(m) } +func (*Oauth2ImplicitSecurity) ProtoMessage() {} +func (*Oauth2ImplicitSecurity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33} } + +func (m *Oauth2ImplicitSecurity) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Oauth2ImplicitSecurity) GetFlow() string { + if m != nil { + return m.Flow + } + return "" +} + +func (m *Oauth2ImplicitSecurity) GetScopes() *Oauth2Scopes { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *Oauth2ImplicitSecurity) GetAuthorizationUrl() string { + if m != nil { + return m.AuthorizationUrl + } + return "" +} + +func (m *Oauth2ImplicitSecurity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Oauth2ImplicitSecurity) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Oauth2PasswordSecurity struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Flow string `protobuf:"bytes,2,opt,name=flow" json:"flow,omitempty"` + Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes" json:"scopes,omitempty"` + TokenUrl string `protobuf:"bytes,4,opt,name=token_url,json=tokenUrl" json:"token_url,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Oauth2PasswordSecurity) Reset() { *m = Oauth2PasswordSecurity{} } +func (m *Oauth2PasswordSecurity) String() string { return proto.CompactTextString(m) } +func (*Oauth2PasswordSecurity) ProtoMessage() {} +func (*Oauth2PasswordSecurity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{34} } + +func (m *Oauth2PasswordSecurity) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Oauth2PasswordSecurity) GetFlow() string { + if m != nil { + return m.Flow + } + return "" +} + +func (m *Oauth2PasswordSecurity) GetScopes() *Oauth2Scopes { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *Oauth2PasswordSecurity) GetTokenUrl() string { + if m != nil { + return m.TokenUrl + } + return "" +} + +func (m *Oauth2PasswordSecurity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Oauth2PasswordSecurity) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Oauth2Scopes struct { + 
AdditionalProperties []*NamedString `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Oauth2Scopes) Reset() { *m = Oauth2Scopes{} } +func (m *Oauth2Scopes) String() string { return proto.CompactTextString(m) } +func (*Oauth2Scopes) ProtoMessage() {} +func (*Oauth2Scopes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{35} } + +func (m *Oauth2Scopes) GetAdditionalProperties() []*NamedString { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Operation struct { + Tags []string `protobuf:"bytes,1,rep,name=tags" json:"tags,omitempty"` + // A brief summary of the operation. + Summary string `protobuf:"bytes,2,opt,name=summary" json:"summary,omitempty"` + // A longer description of the operation, GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,4,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + // A unique identifier of the operation. + OperationId string `protobuf:"bytes,5,opt,name=operation_id,json=operationId" json:"operation_id,omitempty"` + // A list of MIME types the API can produce. + Produces []string `protobuf:"bytes,6,rep,name=produces" json:"produces,omitempty"` + // A list of MIME types the API can consume. + Consumes []string `protobuf:"bytes,7,rep,name=consumes" json:"consumes,omitempty"` + // The parameters needed to send a valid API call. + Parameters []*ParametersItem `protobuf:"bytes,8,rep,name=parameters" json:"parameters,omitempty"` + Responses *Responses `protobuf:"bytes,9,opt,name=responses" json:"responses,omitempty"` + // The transfer protocol of the API. + Schemes []string `protobuf:"bytes,10,rep,name=schemes" json:"schemes,omitempty"` + Deprecated bool `protobuf:"varint,11,opt,name=deprecated" json:"deprecated,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,12,rep,name=security" json:"security,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,13,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{36} } + +func (m *Operation) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *Operation) GetSummary() string { + if m != nil { + return m.Summary + } + return "" +} + +func (m *Operation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Operation) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Operation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *Operation) GetProduces() []string { + if m != nil { + return m.Produces + } + return nil +} + +func (m *Operation) GetConsumes() []string { + if m != nil { + return m.Consumes + } + return nil +} + +func (m *Operation) GetParameters() []*ParametersItem { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Operation) GetResponses() *Responses { + if m != nil { + return m.Responses + } + return nil +} + +func (m *Operation) GetSchemes() []string { + if m != nil { + return m.Schemes + } + return nil +} + +func (m *Operation) GetDeprecated() bool { + if m != nil { + 
return m.Deprecated + } + return false +} + +func (m *Operation) GetSecurity() []*SecurityRequirement { + if m != nil { + return m.Security + } + return nil +} + +func (m *Operation) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Parameter struct { + // Types that are valid to be assigned to Oneof: + // *Parameter_BodyParameter + // *Parameter_NonBodyParameter + Oneof isParameter_Oneof `protobuf_oneof:"oneof"` +} + +func (m *Parameter) Reset() { *m = Parameter{} } +func (m *Parameter) String() string { return proto.CompactTextString(m) } +func (*Parameter) ProtoMessage() {} +func (*Parameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{37} } + +type isParameter_Oneof interface { + isParameter_Oneof() +} + +type Parameter_BodyParameter struct { + BodyParameter *BodyParameter `protobuf:"bytes,1,opt,name=body_parameter,json=bodyParameter,oneof"` +} +type Parameter_NonBodyParameter struct { + NonBodyParameter *NonBodyParameter `protobuf:"bytes,2,opt,name=non_body_parameter,json=nonBodyParameter,oneof"` +} + +func (*Parameter_BodyParameter) isParameter_Oneof() {} +func (*Parameter_NonBodyParameter) isParameter_Oneof() {} + +func (m *Parameter) GetOneof() isParameter_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *Parameter) GetBodyParameter() *BodyParameter { + if x, ok := m.GetOneof().(*Parameter_BodyParameter); ok { + return x.BodyParameter + } + return nil +} + +func (m *Parameter) GetNonBodyParameter() *NonBodyParameter { + if x, ok := m.GetOneof().(*Parameter_NonBodyParameter); ok { + return x.NonBodyParameter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Parameter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Parameter_OneofMarshaler, _Parameter_OneofUnmarshaler, _Parameter_OneofSizer, []interface{}{ + (*Parameter_BodyParameter)(nil), + (*Parameter_NonBodyParameter)(nil), + } +} + +func _Parameter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Parameter) + // oneof + switch x := m.Oneof.(type) { + case *Parameter_BodyParameter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BodyParameter); err != nil { + return err + } + case *Parameter_NonBodyParameter: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NonBodyParameter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Parameter.Oneof has unexpected type %T", x) + } + return nil +} + +func _Parameter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Parameter) + switch tag { + case 1: // oneof.body_parameter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BodyParameter) + err := b.DecodeMessage(msg) + m.Oneof = &Parameter_BodyParameter{msg} + return true, err + case 2: // oneof.non_body_parameter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(NonBodyParameter) + err := b.DecodeMessage(msg) + m.Oneof = &Parameter_NonBodyParameter{msg} + return true, err + default: + return false, nil + } +} + +func _Parameter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Parameter) + // oneof + switch x := m.Oneof.(type) { + case *Parameter_BodyParameter: + s := proto.Size(x.BodyParameter) + n += proto.SizeVarint(1<<3 | 
proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Parameter_NonBodyParameter: + s := proto.Size(x.NonBodyParameter) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// One or more JSON representations for parameters +type ParameterDefinitions struct { + AdditionalProperties []*NamedParameter `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *ParameterDefinitions) Reset() { *m = ParameterDefinitions{} } +func (m *ParameterDefinitions) String() string { return proto.CompactTextString(m) } +func (*ParameterDefinitions) ProtoMessage() {} +func (*ParameterDefinitions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{38} } + +func (m *ParameterDefinitions) GetAdditionalProperties() []*NamedParameter { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type ParametersItem struct { + // Types that are valid to be assigned to Oneof: + // *ParametersItem_Parameter + // *ParametersItem_JsonReference + Oneof isParametersItem_Oneof `protobuf_oneof:"oneof"` +} + +func (m *ParametersItem) Reset() { *m = ParametersItem{} } +func (m *ParametersItem) String() string { return proto.CompactTextString(m) } +func (*ParametersItem) ProtoMessage() {} +func (*ParametersItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39} } + +type isParametersItem_Oneof interface { + isParametersItem_Oneof() +} + +type ParametersItem_Parameter struct { + Parameter *Parameter `protobuf:"bytes,1,opt,name=parameter,oneof"` +} +type ParametersItem_JsonReference struct { + JsonReference *JsonReference `protobuf:"bytes,2,opt,name=json_reference,json=jsonReference,oneof"` +} + +func (*ParametersItem_Parameter) isParametersItem_Oneof() {} +func (*ParametersItem_JsonReference) isParametersItem_Oneof() {} + +func (m *ParametersItem) GetOneof() isParametersItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *ParametersItem) GetParameter() *Parameter { + if x, ok := m.GetOneof().(*ParametersItem_Parameter); ok { + return x.Parameter + } + return nil +} + +func (m *ParametersItem) GetJsonReference() *JsonReference { + if x, ok := m.GetOneof().(*ParametersItem_JsonReference); ok { + return x.JsonReference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ParametersItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ParametersItem_OneofMarshaler, _ParametersItem_OneofUnmarshaler, _ParametersItem_OneofSizer, []interface{}{ + (*ParametersItem_Parameter)(nil), + (*ParametersItem_JsonReference)(nil), + } +} + +func _ParametersItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ParametersItem) + // oneof + switch x := m.Oneof.(type) { + case *ParametersItem_Parameter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Parameter); err != nil { + return err + } + case *ParametersItem_JsonReference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JsonReference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ParametersItem.Oneof has unexpected type %T", x) + } + return nil +} + +func _ParametersItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ParametersItem) + switch tag { + case 1: // oneof.parameter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Parameter) + err := b.DecodeMessage(msg) + m.Oneof = &ParametersItem_Parameter{msg} + return true, err + case 2: // oneof.json_reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JsonReference) + err := b.DecodeMessage(msg) + m.Oneof = &ParametersItem_JsonReference{msg} + return true, err + default: + return false, nil + } +} + +func _ParametersItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ParametersItem) + // oneof + switch x := m.Oneof.(type) { + case *ParametersItem_Parameter: + s := proto.Size(x.Parameter) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *ParametersItem_JsonReference: + s := proto.Size(x.JsonReference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type PathItem struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + Get *Operation `protobuf:"bytes,2,opt,name=get" json:"get,omitempty"` + Put *Operation `protobuf:"bytes,3,opt,name=put" json:"put,omitempty"` + Post *Operation `protobuf:"bytes,4,opt,name=post" json:"post,omitempty"` + Delete *Operation `protobuf:"bytes,5,opt,name=delete" json:"delete,omitempty"` + Options *Operation `protobuf:"bytes,6,opt,name=options" json:"options,omitempty"` + Head *Operation `protobuf:"bytes,7,opt,name=head" json:"head,omitempty"` + Patch *Operation `protobuf:"bytes,8,opt,name=patch" json:"patch,omitempty"` + // The parameters needed to send a valid API call. 
+ Parameters []*ParametersItem `protobuf:"bytes,9,rep,name=parameters" json:"parameters,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,10,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *PathItem) Reset() { *m = PathItem{} } +func (m *PathItem) String() string { return proto.CompactTextString(m) } +func (*PathItem) ProtoMessage() {} +func (*PathItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{40} } + +func (m *PathItem) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *PathItem) GetGet() *Operation { + if m != nil { + return m.Get + } + return nil +} + +func (m *PathItem) GetPut() *Operation { + if m != nil { + return m.Put + } + return nil +} + +func (m *PathItem) GetPost() *Operation { + if m != nil { + return m.Post + } + return nil +} + +func (m *PathItem) GetDelete() *Operation { + if m != nil { + return m.Delete + } + return nil +} + +func (m *PathItem) GetOptions() *Operation { + if m != nil { + return m.Options + } + return nil +} + +func (m *PathItem) GetHead() *Operation { + if m != nil { + return m.Head + } + return nil +} + +func (m *PathItem) GetPatch() *Operation { + if m != nil { + return m.Patch + } + return nil +} + +func (m *PathItem) GetParameters() []*ParametersItem { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *PathItem) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type PathParameterSubSchema struct { + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + // The name of the parameter. 
+ Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` + Type string `protobuf:"bytes,5,opt,name=type" json:"type,omitempty"` + Format string `protobuf:"bytes,6,opt,name=format" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,7,opt,name=items" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,8,opt,name=collection_format,json=collectionFormat" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,9,opt,name=default" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,10,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,11,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,12,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,13,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,14,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,15,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,16,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,17,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,18,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,19,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,20,rep,name=enum" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,21,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,22,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *PathParameterSubSchema) Reset() { *m = PathParameterSubSchema{} } +func (m *PathParameterSubSchema) String() string { return proto.CompactTextString(m) } +func (*PathParameterSubSchema) ProtoMessage() {} +func (*PathParameterSubSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{41} } + +func (m *PathParameterSubSchema) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *PathParameterSubSchema) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *PathParameterSubSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *PathParameterSubSchema) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PathParameterSubSchema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *PathParameterSubSchema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *PathParameterSubSchema) GetItems() *PrimitivesItems { + if m != nil { + return m.Items + } + return nil +} + +func (m *PathParameterSubSchema) GetCollectionFormat() string { + if m != nil { + return m.CollectionFormat + } + return "" +} + +func (m *PathParameterSubSchema) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *PathParameterSubSchema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *PathParameterSubSchema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *PathParameterSubSchema) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + 
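// --- Illustrative sketch (editor's addition, not part of the vendored file) ---
// The Parameter and ParametersItem messages above are plain oneof wrappers, so a
// body parameter has to be boxed twice before it can sit in Operation.Parameters
// or PathItem.Parameters. The standalone program below shows that composition and
// the matching nil-safe getters; the import alias openapi_v2 and the operation id
// "listWidgets" are assumptions made for this example only.
package main

import (
	"fmt"

	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2"
)

func main() {
	// Wrap an (empty) BodyParameter in the two oneof layers defined above.
	item := &openapi_v2.ParametersItem{
		Oneof: &openapi_v2.ParametersItem_Parameter{
			Parameter: &openapi_v2.Parameter{
				Oneof: &openapi_v2.Parameter_BodyParameter{
					BodyParameter: &openapi_v2.BodyParameter{},
				},
			},
		},
	}

	// Attach it to a GET operation on a path item.
	path := &openapi_v2.PathItem{
		Get: &openapi_v2.Operation{
			OperationId: "listWidgets",
			Parameters:  []*openapi_v2.ParametersItem{item},
		},
	}

	// Each generated getter unwraps one layer and tolerates nil receivers.
	op := path.GetGet()
	fmt.Println(op.GetOperationId())                             // listWidgets
	fmt.Println(op.GetParameters()[0].GetParameter() != nil)     // true
	fmt.Println(op.GetParameters()[0].GetJsonReference() == nil) // true
}
// --- end sketch ---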
+func (m *PathParameterSubSchema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *PathParameterSubSchema) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *PathParameterSubSchema) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *PathParameterSubSchema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *PathParameterSubSchema) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *PathParameterSubSchema) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *PathParameterSubSchema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *PathParameterSubSchema) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *PathParameterSubSchema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *PathParameterSubSchema) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +// Relative paths to the individual endpoints. They must be relative to the 'basePath'. +type Paths struct { + VendorExtension []*NamedAny `protobuf:"bytes,1,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` + Path []*NamedPathItem `protobuf:"bytes,2,rep,name=path" json:"path,omitempty"` +} + +func (m *Paths) Reset() { *m = Paths{} } +func (m *Paths) String() string { return proto.CompactTextString(m) } +func (*Paths) ProtoMessage() {} +func (*Paths) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{42} } + +func (m *Paths) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +func (m *Paths) GetPath() []*NamedPathItem { + if m != nil { + return m.Path + } + return nil +} + +type PrimitivesItems struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,3,opt,name=items" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,4,opt,name=collection_format,json=collectionFormat" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,5,opt,name=default" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,6,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,7,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,8,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,9,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,10,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,11,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,12,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,13,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,14,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,15,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,16,rep,name=enum" json:"enum,omitempty"` + MultipleOf float64 
`protobuf:"fixed64,17,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,18,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *PrimitivesItems) Reset() { *m = PrimitivesItems{} } +func (m *PrimitivesItems) String() string { return proto.CompactTextString(m) } +func (*PrimitivesItems) ProtoMessage() {} +func (*PrimitivesItems) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{43} } + +func (m *PrimitivesItems) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *PrimitivesItems) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *PrimitivesItems) GetItems() *PrimitivesItems { + if m != nil { + return m.Items + } + return nil +} + +func (m *PrimitivesItems) GetCollectionFormat() string { + if m != nil { + return m.CollectionFormat + } + return "" +} + +func (m *PrimitivesItems) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *PrimitivesItems) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *PrimitivesItems) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *PrimitivesItems) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *PrimitivesItems) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *PrimitivesItems) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *PrimitivesItems) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *PrimitivesItems) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *PrimitivesItems) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *PrimitivesItems) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *PrimitivesItems) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *PrimitivesItems) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *PrimitivesItems) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *PrimitivesItems) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Properties struct { + AdditionalProperties []*NamedSchema `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Properties) Reset() { *m = Properties{} } +func (m *Properties) String() string { return proto.CompactTextString(m) } +func (*Properties) ProtoMessage() {} +func (*Properties) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44} } + +func (m *Properties) GetAdditionalProperties() []*NamedSchema { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type QueryParameterSubSchema struct { + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. 
+ Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + // The name of the parameter. + Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` + // allows sending a parameter by name only or with an empty value. + AllowEmptyValue bool `protobuf:"varint,5,opt,name=allow_empty_value,json=allowEmptyValue" json:"allow_empty_value,omitempty"` + Type string `protobuf:"bytes,6,opt,name=type" json:"type,omitempty"` + Format string `protobuf:"bytes,7,opt,name=format" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,8,opt,name=items" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,9,opt,name=collection_format,json=collectionFormat" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,10,opt,name=default" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,11,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,13,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,14,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,15,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,16,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,17,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,18,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,19,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,20,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,21,rep,name=enum" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,22,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,23,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *QueryParameterSubSchema) Reset() { *m = QueryParameterSubSchema{} } +func (m *QueryParameterSubSchema) String() string { return proto.CompactTextString(m) } +func (*QueryParameterSubSchema) ProtoMessage() {} +func (*QueryParameterSubSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{45} } + +func (m *QueryParameterSubSchema) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *QueryParameterSubSchema) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *QueryParameterSubSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *QueryParameterSubSchema) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *QueryParameterSubSchema) GetAllowEmptyValue() bool { + if m != nil { + return m.AllowEmptyValue + } + return false +} + +func (m *QueryParameterSubSchema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *QueryParameterSubSchema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *QueryParameterSubSchema) GetItems() *PrimitivesItems { + if m != nil { + return m.Items + } + return nil +} + +func (m *QueryParameterSubSchema) GetCollectionFormat() string { + if m != nil { + return m.CollectionFormat + } + return "" +} + 
+func (m *QueryParameterSubSchema) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *QueryParameterSubSchema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *QueryParameterSubSchema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *QueryParameterSubSchema) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *QueryParameterSubSchema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *QueryParameterSubSchema) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *QueryParameterSubSchema) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *QueryParameterSubSchema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *QueryParameterSubSchema) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *QueryParameterSubSchema) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *QueryParameterSubSchema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *QueryParameterSubSchema) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *QueryParameterSubSchema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *QueryParameterSubSchema) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type Response struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Schema *SchemaItem `protobuf:"bytes,2,opt,name=schema" json:"schema,omitempty"` + Headers *Headers `protobuf:"bytes,3,opt,name=headers" json:"headers,omitempty"` + Examples *Examples `protobuf:"bytes,4,opt,name=examples" json:"examples,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,5,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Response) Reset() { *m = Response{} } +func (m *Response) String() string { return proto.CompactTextString(m) } +func (*Response) ProtoMessage() {} +func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{46} } + +func (m *Response) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Response) GetSchema() *SchemaItem { + if m != nil { + return m.Schema + } + return nil +} + +func (m *Response) GetHeaders() *Headers { + if m != nil { + return m.Headers + } + return nil +} + +func (m *Response) GetExamples() *Examples { + if m != nil { + return m.Examples + } + return nil +} + +func (m *Response) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +// One or more JSON representations for parameters +type ResponseDefinitions struct { + AdditionalProperties []*NamedResponse `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *ResponseDefinitions) Reset() { *m = ResponseDefinitions{} } +func (m *ResponseDefinitions) String() string { return proto.CompactTextString(m) } +func (*ResponseDefinitions) ProtoMessage() {} +func (*ResponseDefinitions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{47} } + +func (m *ResponseDefinitions) GetAdditionalProperties() 
[]*NamedResponse { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type ResponseValue struct { + // Types that are valid to be assigned to Oneof: + // *ResponseValue_Response + // *ResponseValue_JsonReference + Oneof isResponseValue_Oneof `protobuf_oneof:"oneof"` +} + +func (m *ResponseValue) Reset() { *m = ResponseValue{} } +func (m *ResponseValue) String() string { return proto.CompactTextString(m) } +func (*ResponseValue) ProtoMessage() {} +func (*ResponseValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{48} } + +type isResponseValue_Oneof interface { + isResponseValue_Oneof() +} + +type ResponseValue_Response struct { + Response *Response `protobuf:"bytes,1,opt,name=response,oneof"` +} +type ResponseValue_JsonReference struct { + JsonReference *JsonReference `protobuf:"bytes,2,opt,name=json_reference,json=jsonReference,oneof"` +} + +func (*ResponseValue_Response) isResponseValue_Oneof() {} +func (*ResponseValue_JsonReference) isResponseValue_Oneof() {} + +func (m *ResponseValue) GetOneof() isResponseValue_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *ResponseValue) GetResponse() *Response { + if x, ok := m.GetOneof().(*ResponseValue_Response); ok { + return x.Response + } + return nil +} + +func (m *ResponseValue) GetJsonReference() *JsonReference { + if x, ok := m.GetOneof().(*ResponseValue_JsonReference); ok { + return x.JsonReference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ResponseValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ResponseValue_OneofMarshaler, _ResponseValue_OneofUnmarshaler, _ResponseValue_OneofSizer, []interface{}{ + (*ResponseValue_Response)(nil), + (*ResponseValue_JsonReference)(nil), + } +} + +func _ResponseValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ResponseValue) + // oneof + switch x := m.Oneof.(type) { + case *ResponseValue_Response: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Response); err != nil { + return err + } + case *ResponseValue_JsonReference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JsonReference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ResponseValue.Oneof has unexpected type %T", x) + } + return nil +} + +func _ResponseValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ResponseValue) + switch tag { + case 1: // oneof.response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Response) + err := b.DecodeMessage(msg) + m.Oneof = &ResponseValue_Response{msg} + return true, err + case 2: // oneof.json_reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JsonReference) + err := b.DecodeMessage(msg) + m.Oneof = &ResponseValue_JsonReference{msg} + return true, err + default: + return false, nil + } +} + +func _ResponseValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ResponseValue) + // oneof + switch x := m.Oneof.(type) { + case *ResponseValue_Response: + s := proto.Size(x.Response) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *ResponseValue_JsonReference: + s := proto.Size(x.JsonReference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += 
proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response objects names can either be any valid HTTP status code or 'default'. +type Responses struct { + ResponseCode []*NamedResponseValue `protobuf:"bytes,1,rep,name=response_code,json=responseCode" json:"response_code,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,2,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Responses) Reset() { *m = Responses{} } +func (m *Responses) String() string { return proto.CompactTextString(m) } +func (*Responses) ProtoMessage() {} +func (*Responses) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{49} } + +func (m *Responses) GetResponseCode() []*NamedResponseValue { + if m != nil { + return m.ResponseCode + } + return nil +} + +func (m *Responses) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +// A deterministic version of a JSON Schema object. +type Schema struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + Format string `protobuf:"bytes,2,opt,name=format" json:"format,omitempty"` + Title string `protobuf:"bytes,3,opt,name=title" json:"title,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` + Default *Any `protobuf:"bytes,5,opt,name=default" json:"default,omitempty"` + MultipleOf float64 `protobuf:"fixed64,6,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + Maximum float64 `protobuf:"fixed64,7,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,8,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,9,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,10,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,11,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,12,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,13,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,14,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,15,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,16,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + MaxProperties int64 `protobuf:"varint,17,opt,name=max_properties,json=maxProperties" json:"max_properties,omitempty"` + MinProperties int64 `protobuf:"varint,18,opt,name=min_properties,json=minProperties" json:"min_properties,omitempty"` + Required []string `protobuf:"bytes,19,rep,name=required" json:"required,omitempty"` + Enum []*Any `protobuf:"bytes,20,rep,name=enum" json:"enum,omitempty"` + AdditionalProperties *AdditionalPropertiesItem `protobuf:"bytes,21,opt,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` + Type *TypeItem `protobuf:"bytes,22,opt,name=type" json:"type,omitempty"` + Items *ItemsItem `protobuf:"bytes,23,opt,name=items" json:"items,omitempty"` + AllOf []*Schema `protobuf:"bytes,24,rep,name=all_of,json=allOf" json:"all_of,omitempty"` + Properties *Properties `protobuf:"bytes,25,opt,name=properties" json:"properties,omitempty"` + Discriminator string 
`protobuf:"bytes,26,opt,name=discriminator" json:"discriminator,omitempty"` + ReadOnly bool `protobuf:"varint,27,opt,name=read_only,json=readOnly" json:"read_only,omitempty"` + Xml *Xml `protobuf:"bytes,28,opt,name=xml" json:"xml,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,29,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + Example *Any `protobuf:"bytes,30,opt,name=example" json:"example,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,31,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Schema) Reset() { *m = Schema{} } +func (m *Schema) String() string { return proto.CompactTextString(m) } +func (*Schema) ProtoMessage() {} +func (*Schema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{50} } + +func (m *Schema) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *Schema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *Schema) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Schema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Schema) GetDefault() *Any { + if m != nil { + return m.Default + } + return nil +} + +func (m *Schema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *Schema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *Schema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *Schema) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *Schema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *Schema) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *Schema) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *Schema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *Schema) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *Schema) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *Schema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *Schema) GetMaxProperties() int64 { + if m != nil { + return m.MaxProperties + } + return 0 +} + +func (m *Schema) GetMinProperties() int64 { + if m != nil { + return m.MinProperties + } + return 0 +} + +func (m *Schema) GetRequired() []string { + if m != nil { + return m.Required + } + return nil +} + +func (m *Schema) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *Schema) GetAdditionalProperties() *AdditionalPropertiesItem { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +func (m *Schema) GetType() *TypeItem { + if m != nil { + return m.Type + } + return nil +} + +func (m *Schema) GetItems() *ItemsItem { + if m != nil { + return m.Items + } + return nil +} + +func (m *Schema) GetAllOf() []*Schema { + if m != nil { + return m.AllOf + } + return nil +} + +func (m *Schema) GetProperties() *Properties { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Schema) GetDiscriminator() string { + if m != nil { + return m.Discriminator + } + return "" +} + +func (m *Schema) GetReadOnly() bool { + if m != nil { + return m.ReadOnly + } + return false +} + 
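// --- Illustrative sketch (editor's addition, not part of the vendored file) ---
// Every accessor generated for Schema (and the other messages) guards with
// `if m != nil`, which is why a getter chain that starts from a nil message
// returns zero values instead of panicking. A minimal demonstration, assuming
// the same openapi_v2 import alias as in the sketch above:
package main

import (
	"fmt"

	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2"
)

func main() {
	var s *openapi_v2.Schema // deliberately nil

	fmt.Printf("%q\n", s.GetTitle())                              // ""
	fmt.Println(s.GetRequired() == nil)                           // true
	fmt.Println(len(s.GetProperties().GetAdditionalProperties())) // 0
}
// --- end sketch ---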
+func (m *Schema) GetXml() *Xml { + if m != nil { + return m.Xml + } + return nil +} + +func (m *Schema) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Schema) GetExample() *Any { + if m != nil { + return m.Example + } + return nil +} + +func (m *Schema) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type SchemaItem struct { + // Types that are valid to be assigned to Oneof: + // *SchemaItem_Schema + // *SchemaItem_FileSchema + Oneof isSchemaItem_Oneof `protobuf_oneof:"oneof"` +} + +func (m *SchemaItem) Reset() { *m = SchemaItem{} } +func (m *SchemaItem) String() string { return proto.CompactTextString(m) } +func (*SchemaItem) ProtoMessage() {} +func (*SchemaItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{51} } + +type isSchemaItem_Oneof interface { + isSchemaItem_Oneof() +} + +type SchemaItem_Schema struct { + Schema *Schema `protobuf:"bytes,1,opt,name=schema,oneof"` +} +type SchemaItem_FileSchema struct { + FileSchema *FileSchema `protobuf:"bytes,2,opt,name=file_schema,json=fileSchema,oneof"` +} + +func (*SchemaItem_Schema) isSchemaItem_Oneof() {} +func (*SchemaItem_FileSchema) isSchemaItem_Oneof() {} + +func (m *SchemaItem) GetOneof() isSchemaItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *SchemaItem) GetSchema() *Schema { + if x, ok := m.GetOneof().(*SchemaItem_Schema); ok { + return x.Schema + } + return nil +} + +func (m *SchemaItem) GetFileSchema() *FileSchema { + if x, ok := m.GetOneof().(*SchemaItem_FileSchema); ok { + return x.FileSchema + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SchemaItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SchemaItem_OneofMarshaler, _SchemaItem_OneofUnmarshaler, _SchemaItem_OneofSizer, []interface{}{ + (*SchemaItem_Schema)(nil), + (*SchemaItem_FileSchema)(nil), + } +} + +func _SchemaItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SchemaItem) + // oneof + switch x := m.Oneof.(type) { + case *SchemaItem_Schema: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Schema); err != nil { + return err + } + case *SchemaItem_FileSchema: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FileSchema); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SchemaItem.Oneof has unexpected type %T", x) + } + return nil +} + +func _SchemaItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SchemaItem) + switch tag { + case 1: // oneof.schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Schema) + err := b.DecodeMessage(msg) + m.Oneof = &SchemaItem_Schema{msg} + return true, err + case 2: // oneof.file_schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FileSchema) + err := b.DecodeMessage(msg) + m.Oneof = &SchemaItem_FileSchema{msg} + return true, err + default: + return false, nil + } +} + +func _SchemaItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SchemaItem) + // oneof + switch x := m.Oneof.(type) { + case *SchemaItem_Schema: + s := proto.Size(x.Schema) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case 
*SchemaItem_FileSchema: + s := proto.Size(x.FileSchema) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SecurityDefinitions struct { + AdditionalProperties []*NamedSecurityDefinitionsItem `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *SecurityDefinitions) Reset() { *m = SecurityDefinitions{} } +func (m *SecurityDefinitions) String() string { return proto.CompactTextString(m) } +func (*SecurityDefinitions) ProtoMessage() {} +func (*SecurityDefinitions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{52} } + +func (m *SecurityDefinitions) GetAdditionalProperties() []*NamedSecurityDefinitionsItem { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type SecurityDefinitionsItem struct { + // Types that are valid to be assigned to Oneof: + // *SecurityDefinitionsItem_BasicAuthenticationSecurity + // *SecurityDefinitionsItem_ApiKeySecurity + // *SecurityDefinitionsItem_Oauth2ImplicitSecurity + // *SecurityDefinitionsItem_Oauth2PasswordSecurity + // *SecurityDefinitionsItem_Oauth2ApplicationSecurity + // *SecurityDefinitionsItem_Oauth2AccessCodeSecurity + Oneof isSecurityDefinitionsItem_Oneof `protobuf_oneof:"oneof"` +} + +func (m *SecurityDefinitionsItem) Reset() { *m = SecurityDefinitionsItem{} } +func (m *SecurityDefinitionsItem) String() string { return proto.CompactTextString(m) } +func (*SecurityDefinitionsItem) ProtoMessage() {} +func (*SecurityDefinitionsItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{53} } + +type isSecurityDefinitionsItem_Oneof interface { + isSecurityDefinitionsItem_Oneof() +} + +type SecurityDefinitionsItem_BasicAuthenticationSecurity struct { + BasicAuthenticationSecurity *BasicAuthenticationSecurity `protobuf:"bytes,1,opt,name=basic_authentication_security,json=basicAuthenticationSecurity,oneof"` +} +type SecurityDefinitionsItem_ApiKeySecurity struct { + ApiKeySecurity *ApiKeySecurity `protobuf:"bytes,2,opt,name=api_key_security,json=apiKeySecurity,oneof"` +} +type SecurityDefinitionsItem_Oauth2ImplicitSecurity struct { + Oauth2ImplicitSecurity *Oauth2ImplicitSecurity `protobuf:"bytes,3,opt,name=oauth2_implicit_security,json=oauth2ImplicitSecurity,oneof"` +} +type SecurityDefinitionsItem_Oauth2PasswordSecurity struct { + Oauth2PasswordSecurity *Oauth2PasswordSecurity `protobuf:"bytes,4,opt,name=oauth2_password_security,json=oauth2PasswordSecurity,oneof"` +} +type SecurityDefinitionsItem_Oauth2ApplicationSecurity struct { + Oauth2ApplicationSecurity *Oauth2ApplicationSecurity `protobuf:"bytes,5,opt,name=oauth2_application_security,json=oauth2ApplicationSecurity,oneof"` +} +type SecurityDefinitionsItem_Oauth2AccessCodeSecurity struct { + Oauth2AccessCodeSecurity *Oauth2AccessCodeSecurity `protobuf:"bytes,6,opt,name=oauth2_access_code_security,json=oauth2AccessCodeSecurity,oneof"` +} + +func (*SecurityDefinitionsItem_BasicAuthenticationSecurity) isSecurityDefinitionsItem_Oneof() {} +func (*SecurityDefinitionsItem_ApiKeySecurity) isSecurityDefinitionsItem_Oneof() {} +func (*SecurityDefinitionsItem_Oauth2ImplicitSecurity) isSecurityDefinitionsItem_Oneof() {} +func (*SecurityDefinitionsItem_Oauth2PasswordSecurity) isSecurityDefinitionsItem_Oneof() {} +func (*SecurityDefinitionsItem_Oauth2ApplicationSecurity) isSecurityDefinitionsItem_Oneof() {} +func 
(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity) isSecurityDefinitionsItem_Oneof() {} + +func (m *SecurityDefinitionsItem) GetOneof() isSecurityDefinitionsItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *SecurityDefinitionsItem) GetBasicAuthenticationSecurity() *BasicAuthenticationSecurity { + if x, ok := m.GetOneof().(*SecurityDefinitionsItem_BasicAuthenticationSecurity); ok { + return x.BasicAuthenticationSecurity + } + return nil +} + +func (m *SecurityDefinitionsItem) GetApiKeySecurity() *ApiKeySecurity { + if x, ok := m.GetOneof().(*SecurityDefinitionsItem_ApiKeySecurity); ok { + return x.ApiKeySecurity + } + return nil +} + +func (m *SecurityDefinitionsItem) GetOauth2ImplicitSecurity() *Oauth2ImplicitSecurity { + if x, ok := m.GetOneof().(*SecurityDefinitionsItem_Oauth2ImplicitSecurity); ok { + return x.Oauth2ImplicitSecurity + } + return nil +} + +func (m *SecurityDefinitionsItem) GetOauth2PasswordSecurity() *Oauth2PasswordSecurity { + if x, ok := m.GetOneof().(*SecurityDefinitionsItem_Oauth2PasswordSecurity); ok { + return x.Oauth2PasswordSecurity + } + return nil +} + +func (m *SecurityDefinitionsItem) GetOauth2ApplicationSecurity() *Oauth2ApplicationSecurity { + if x, ok := m.GetOneof().(*SecurityDefinitionsItem_Oauth2ApplicationSecurity); ok { + return x.Oauth2ApplicationSecurity + } + return nil +} + +func (m *SecurityDefinitionsItem) GetOauth2AccessCodeSecurity() *Oauth2AccessCodeSecurity { + if x, ok := m.GetOneof().(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity); ok { + return x.Oauth2AccessCodeSecurity + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SecurityDefinitionsItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SecurityDefinitionsItem_OneofMarshaler, _SecurityDefinitionsItem_OneofUnmarshaler, _SecurityDefinitionsItem_OneofSizer, []interface{}{ + (*SecurityDefinitionsItem_BasicAuthenticationSecurity)(nil), + (*SecurityDefinitionsItem_ApiKeySecurity)(nil), + (*SecurityDefinitionsItem_Oauth2ImplicitSecurity)(nil), + (*SecurityDefinitionsItem_Oauth2PasswordSecurity)(nil), + (*SecurityDefinitionsItem_Oauth2ApplicationSecurity)(nil), + (*SecurityDefinitionsItem_Oauth2AccessCodeSecurity)(nil), + } +} + +func _SecurityDefinitionsItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SecurityDefinitionsItem) + // oneof + switch x := m.Oneof.(type) { + case *SecurityDefinitionsItem_BasicAuthenticationSecurity: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BasicAuthenticationSecurity); err != nil { + return err + } + case *SecurityDefinitionsItem_ApiKeySecurity: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApiKeySecurity); err != nil { + return err + } + case *SecurityDefinitionsItem_Oauth2ImplicitSecurity: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Oauth2ImplicitSecurity); err != nil { + return err + } + case *SecurityDefinitionsItem_Oauth2PasswordSecurity: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Oauth2PasswordSecurity); err != nil { + return err + } + case *SecurityDefinitionsItem_Oauth2ApplicationSecurity: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Oauth2ApplicationSecurity); err != nil { + return err + } + case *SecurityDefinitionsItem_Oauth2AccessCodeSecurity: + b.EncodeVarint(6<<3 | 
proto.WireBytes) + if err := b.EncodeMessage(x.Oauth2AccessCodeSecurity); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SecurityDefinitionsItem.Oneof has unexpected type %T", x) + } + return nil +} + +func _SecurityDefinitionsItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SecurityDefinitionsItem) + switch tag { + case 1: // oneof.basic_authentication_security + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BasicAuthenticationSecurity) + err := b.DecodeMessage(msg) + m.Oneof = &SecurityDefinitionsItem_BasicAuthenticationSecurity{msg} + return true, err + case 2: // oneof.api_key_security + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApiKeySecurity) + err := b.DecodeMessage(msg) + m.Oneof = &SecurityDefinitionsItem_ApiKeySecurity{msg} + return true, err + case 3: // oneof.oauth2_implicit_security + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Oauth2ImplicitSecurity) + err := b.DecodeMessage(msg) + m.Oneof = &SecurityDefinitionsItem_Oauth2ImplicitSecurity{msg} + return true, err + case 4: // oneof.oauth2_password_security + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Oauth2PasswordSecurity) + err := b.DecodeMessage(msg) + m.Oneof = &SecurityDefinitionsItem_Oauth2PasswordSecurity{msg} + return true, err + case 5: // oneof.oauth2_application_security + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Oauth2ApplicationSecurity) + err := b.DecodeMessage(msg) + m.Oneof = &SecurityDefinitionsItem_Oauth2ApplicationSecurity{msg} + return true, err + case 6: // oneof.oauth2_access_code_security + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Oauth2AccessCodeSecurity) + err := b.DecodeMessage(msg) + m.Oneof = &SecurityDefinitionsItem_Oauth2AccessCodeSecurity{msg} + return true, err + default: + return false, nil + } +} + +func _SecurityDefinitionsItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SecurityDefinitionsItem) + // oneof + switch x := m.Oneof.(type) { + case *SecurityDefinitionsItem_BasicAuthenticationSecurity: + s := proto.Size(x.BasicAuthenticationSecurity) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SecurityDefinitionsItem_ApiKeySecurity: + s := proto.Size(x.ApiKeySecurity) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SecurityDefinitionsItem_Oauth2ImplicitSecurity: + s := proto.Size(x.Oauth2ImplicitSecurity) + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SecurityDefinitionsItem_Oauth2PasswordSecurity: + s := proto.Size(x.Oauth2PasswordSecurity) + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SecurityDefinitionsItem_Oauth2ApplicationSecurity: + s := proto.Size(x.Oauth2ApplicationSecurity) + n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SecurityDefinitionsItem_Oauth2AccessCodeSecurity: + s := proto.Size(x.Oauth2AccessCodeSecurity) + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SecurityRequirement struct { + 
AdditionalProperties []*NamedStringArray `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *SecurityRequirement) Reset() { *m = SecurityRequirement{} } +func (m *SecurityRequirement) String() string { return proto.CompactTextString(m) } +func (*SecurityRequirement) ProtoMessage() {} +func (*SecurityRequirement) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{54} } + +func (m *SecurityRequirement) GetAdditionalProperties() []*NamedStringArray { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type StringArray struct { + Value []string `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` +} + +func (m *StringArray) Reset() { *m = StringArray{} } +func (m *StringArray) String() string { return proto.CompactTextString(m) } +func (*StringArray) ProtoMessage() {} +func (*StringArray) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{55} } + +func (m *StringArray) GetValue() []string { + if m != nil { + return m.Value + } + return nil +} + +type Tag struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,3,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,4,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Tag) Reset() { *m = Tag{} } +func (m *Tag) String() string { return proto.CompactTextString(m) } +func (*Tag) ProtoMessage() {} +func (*Tag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{56} } + +func (m *Tag) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Tag) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Tag) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Tag) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +type TypeItem struct { + Value []string `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` +} + +func (m *TypeItem) Reset() { *m = TypeItem{} } +func (m *TypeItem) String() string { return proto.CompactTextString(m) } +func (*TypeItem) ProtoMessage() {} +func (*TypeItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{57} } + +func (m *TypeItem) GetValue() []string { + if m != nil { + return m.Value + } + return nil +} + +// Any property starting with x- is valid. 
+type VendorExtension struct { + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *VendorExtension) Reset() { *m = VendorExtension{} } +func (m *VendorExtension) String() string { return proto.CompactTextString(m) } +func (*VendorExtension) ProtoMessage() {} +func (*VendorExtension) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{58} } + +func (m *VendorExtension) GetAdditionalProperties() []*NamedAny { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Xml struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace" json:"namespace,omitempty"` + Prefix string `protobuf:"bytes,3,opt,name=prefix" json:"prefix,omitempty"` + Attribute bool `protobuf:"varint,4,opt,name=attribute" json:"attribute,omitempty"` + Wrapped bool `protobuf:"varint,5,opt,name=wrapped" json:"wrapped,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension" json:"vendor_extension,omitempty"` +} + +func (m *Xml) Reset() { *m = Xml{} } +func (m *Xml) String() string { return proto.CompactTextString(m) } +func (*Xml) ProtoMessage() {} +func (*Xml) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{59} } + +func (m *Xml) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Xml) GetNamespace() string { + if m != nil { + return m.Namespace + } + return "" +} + +func (m *Xml) GetPrefix() string { + if m != nil { + return m.Prefix + } + return "" +} + +func (m *Xml) GetAttribute() bool { + if m != nil { + return m.Attribute + } + return false +} + +func (m *Xml) GetWrapped() bool { + if m != nil { + return m.Wrapped + } + return false +} + +func (m *Xml) GetVendorExtension() []*NamedAny { + if m != nil { + return m.VendorExtension + } + return nil +} + +func init() { + proto.RegisterType((*AdditionalPropertiesItem)(nil), "openapi.v2.AdditionalPropertiesItem") + proto.RegisterType((*Any)(nil), "openapi.v2.Any") + proto.RegisterType((*ApiKeySecurity)(nil), "openapi.v2.ApiKeySecurity") + proto.RegisterType((*BasicAuthenticationSecurity)(nil), "openapi.v2.BasicAuthenticationSecurity") + proto.RegisterType((*BodyParameter)(nil), "openapi.v2.BodyParameter") + proto.RegisterType((*Contact)(nil), "openapi.v2.Contact") + proto.RegisterType((*Default)(nil), "openapi.v2.Default") + proto.RegisterType((*Definitions)(nil), "openapi.v2.Definitions") + proto.RegisterType((*Document)(nil), "openapi.v2.Document") + proto.RegisterType((*Examples)(nil), "openapi.v2.Examples") + proto.RegisterType((*ExternalDocs)(nil), "openapi.v2.ExternalDocs") + proto.RegisterType((*FileSchema)(nil), "openapi.v2.FileSchema") + proto.RegisterType((*FormDataParameterSubSchema)(nil), "openapi.v2.FormDataParameterSubSchema") + proto.RegisterType((*Header)(nil), "openapi.v2.Header") + proto.RegisterType((*HeaderParameterSubSchema)(nil), "openapi.v2.HeaderParameterSubSchema") + proto.RegisterType((*Headers)(nil), "openapi.v2.Headers") + proto.RegisterType((*Info)(nil), "openapi.v2.Info") + proto.RegisterType((*ItemsItem)(nil), "openapi.v2.ItemsItem") + proto.RegisterType((*JsonReference)(nil), "openapi.v2.JsonReference") + proto.RegisterType((*License)(nil), "openapi.v2.License") + proto.RegisterType((*NamedAny)(nil), "openapi.v2.NamedAny") + proto.RegisterType((*NamedHeader)(nil), "openapi.v2.NamedHeader") + proto.RegisterType((*NamedParameter)(nil), 
"openapi.v2.NamedParameter") + proto.RegisterType((*NamedPathItem)(nil), "openapi.v2.NamedPathItem") + proto.RegisterType((*NamedResponse)(nil), "openapi.v2.NamedResponse") + proto.RegisterType((*NamedResponseValue)(nil), "openapi.v2.NamedResponseValue") + proto.RegisterType((*NamedSchema)(nil), "openapi.v2.NamedSchema") + proto.RegisterType((*NamedSecurityDefinitionsItem)(nil), "openapi.v2.NamedSecurityDefinitionsItem") + proto.RegisterType((*NamedString)(nil), "openapi.v2.NamedString") + proto.RegisterType((*NamedStringArray)(nil), "openapi.v2.NamedStringArray") + proto.RegisterType((*NonBodyParameter)(nil), "openapi.v2.NonBodyParameter") + proto.RegisterType((*Oauth2AccessCodeSecurity)(nil), "openapi.v2.Oauth2AccessCodeSecurity") + proto.RegisterType((*Oauth2ApplicationSecurity)(nil), "openapi.v2.Oauth2ApplicationSecurity") + proto.RegisterType((*Oauth2ImplicitSecurity)(nil), "openapi.v2.Oauth2ImplicitSecurity") + proto.RegisterType((*Oauth2PasswordSecurity)(nil), "openapi.v2.Oauth2PasswordSecurity") + proto.RegisterType((*Oauth2Scopes)(nil), "openapi.v2.Oauth2Scopes") + proto.RegisterType((*Operation)(nil), "openapi.v2.Operation") + proto.RegisterType((*Parameter)(nil), "openapi.v2.Parameter") + proto.RegisterType((*ParameterDefinitions)(nil), "openapi.v2.ParameterDefinitions") + proto.RegisterType((*ParametersItem)(nil), "openapi.v2.ParametersItem") + proto.RegisterType((*PathItem)(nil), "openapi.v2.PathItem") + proto.RegisterType((*PathParameterSubSchema)(nil), "openapi.v2.PathParameterSubSchema") + proto.RegisterType((*Paths)(nil), "openapi.v2.Paths") + proto.RegisterType((*PrimitivesItems)(nil), "openapi.v2.PrimitivesItems") + proto.RegisterType((*Properties)(nil), "openapi.v2.Properties") + proto.RegisterType((*QueryParameterSubSchema)(nil), "openapi.v2.QueryParameterSubSchema") + proto.RegisterType((*Response)(nil), "openapi.v2.Response") + proto.RegisterType((*ResponseDefinitions)(nil), "openapi.v2.ResponseDefinitions") + proto.RegisterType((*ResponseValue)(nil), "openapi.v2.ResponseValue") + proto.RegisterType((*Responses)(nil), "openapi.v2.Responses") + proto.RegisterType((*Schema)(nil), "openapi.v2.Schema") + proto.RegisterType((*SchemaItem)(nil), "openapi.v2.SchemaItem") + proto.RegisterType((*SecurityDefinitions)(nil), "openapi.v2.SecurityDefinitions") + proto.RegisterType((*SecurityDefinitionsItem)(nil), "openapi.v2.SecurityDefinitionsItem") + proto.RegisterType((*SecurityRequirement)(nil), "openapi.v2.SecurityRequirement") + proto.RegisterType((*StringArray)(nil), "openapi.v2.StringArray") + proto.RegisterType((*Tag)(nil), "openapi.v2.Tag") + proto.RegisterType((*TypeItem)(nil), "openapi.v2.TypeItem") + proto.RegisterType((*VendorExtension)(nil), "openapi.v2.VendorExtension") + proto.RegisterType((*Xml)(nil), "openapi.v2.Xml") +} + +func init() { proto.RegisterFile("OpenAPIv2/OpenAPIv2.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 3129 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xec, 0x3b, 0x4b, 0x73, 0x1c, 0x57, + 0xd5, 0xf3, 0x7e, 0x1c, 0x69, 0x46, 0xa3, 0x96, 0x2c, 0xb7, 0x24, 0xc7, 0x71, 0xe4, 0x3c, 0x6c, + 0xe7, 0xb3, 0x9c, 0x4f, 0x29, 0x48, 0x05, 0x2a, 0x05, 0xf2, 0xab, 0xc6, 0xc4, 0x44, 0x4a, 0xcb, + 0x0e, 0x09, 0x04, 0xba, 0xae, 0x66, 0xee, 0x48, 0x9d, 0x74, 0xf7, 0x6d, 0x77, 0xf7, 0xc8, 0x1a, + 0x16, 0x2c, 0xa0, 0x8a, 0x35, 0x50, 0x59, 0x53, 0x15, 0x16, 0x14, 0x55, 0x59, 0xb0, 0x62, 0xc5, + 0x1f, 0x60, 0xc7, 0x3f, 0x60, 0x0d, 0x5b, 0xaa, 0x58, 0x51, 0x3c, 0xea, 0xbe, 0xfa, 0x31, 0x7d, + 
0x7b, 0x1e, 0x96, 0x0b, 0x28, 0xd0, 0x6a, 0xe6, 0xde, 0x73, 0xee, 0xb9, 0xa7, 0x4f, 0x9f, 0xd7, + 0x3d, 0xe7, 0x36, 0xac, 0xef, 0x79, 0xd8, 0xdd, 0xdd, 0x7f, 0x70, 0xb2, 0x73, 0x2b, 0xfa, 0xb7, + 0xed, 0xf9, 0x24, 0x24, 0x1a, 0x10, 0x0f, 0xbb, 0xc8, 0xb3, 0xb6, 0x4f, 0x76, 0x36, 0xd6, 0x8f, + 0x08, 0x39, 0xb2, 0xf1, 0x2d, 0x06, 0x39, 0x1c, 0x0e, 0x6e, 0x21, 0x77, 0xc4, 0xd1, 0xb6, 0x1c, + 0xd0, 0x77, 0xfb, 0x7d, 0x2b, 0xb4, 0x88, 0x8b, 0xec, 0x7d, 0x9f, 0x78, 0xd8, 0x0f, 0x2d, 0x1c, + 0x3c, 0x08, 0xb1, 0xa3, 0xfd, 0x1f, 0xd4, 0x82, 0xde, 0x31, 0x76, 0x90, 0x5e, 0xbc, 0x52, 0xbc, + 0xb6, 0xb0, 0xa3, 0x6d, 0xc7, 0x34, 0xb7, 0x0f, 0x18, 0xa4, 0x5b, 0x30, 0x04, 0x8e, 0xb6, 0x01, + 0xf5, 0x43, 0x42, 0x6c, 0x8c, 0x5c, 0xbd, 0x74, 0xa5, 0x78, 0xad, 0xd1, 0x2d, 0x18, 0x72, 0xe2, + 0x76, 0x1d, 0xaa, 0xc4, 0xc5, 0x64, 0xb0, 0x75, 0x0f, 0xca, 0xbb, 0xee, 0x48, 0xbb, 0x01, 0xd5, + 0x13, 0x64, 0x0f, 0xb1, 0x20, 0xbc, 0xba, 0xcd, 0x19, 0xdc, 0x96, 0x0c, 0x6e, 0xef, 0xba, 0x23, + 0x83, 0xa3, 0x68, 0x1a, 0x54, 0x46, 0xc8, 0xb1, 0x19, 0xd1, 0xa6, 0xc1, 0xfe, 0x6f, 0x7d, 0x51, + 0x84, 0xf6, 0xae, 0x67, 0xbd, 0x8b, 0x47, 0x07, 0xb8, 0x37, 0xf4, 0xad, 0x70, 0x44, 0xd1, 0xc2, + 0x91, 0xc7, 0x29, 0x36, 0x0d, 0xf6, 0x9f, 0xce, 0xb9, 0xc8, 0xc1, 0x72, 0x29, 0xfd, 0xaf, 0xb5, + 0xa1, 0x64, 0xb9, 0x7a, 0x99, 0xcd, 0x94, 0x2c, 0x57, 0xbb, 0x02, 0x0b, 0x7d, 0x1c, 0xf4, 0x7c, + 0xcb, 0xa3, 0x32, 0xd0, 0x2b, 0x0c, 0x90, 0x9c, 0xd2, 0xbe, 0x06, 0x9d, 0x13, 0xec, 0xf6, 0x89, + 0x6f, 0xe2, 0xd3, 0x10, 0xbb, 0x01, 0x45, 0xab, 0x5e, 0x29, 0x33, 0xbe, 0x13, 0x02, 0x79, 0x0f, + 0x39, 0xb8, 0x4f, 0xf9, 0x5e, 0xe2, 0xd8, 0xf7, 0x24, 0xf2, 0xd6, 0x67, 0x45, 0xd8, 0xbc, 0x8d, + 0x02, 0xab, 0xb7, 0x3b, 0x0c, 0x8f, 0xb1, 0x1b, 0x5a, 0x3d, 0x44, 0x09, 0x4f, 0x64, 0x7d, 0x8c, + 0xad, 0xd2, 0x6c, 0x6c, 0x95, 0xe7, 0x61, 0xeb, 0x0f, 0x45, 0x68, 0xdd, 0x26, 0xfd, 0xd1, 0x3e, + 0xf2, 0x91, 0x83, 0x43, 0xec, 0x8f, 0x6f, 0x5a, 0xcc, 0x6e, 0x3a, 0x8b, 0x44, 0x37, 0xa0, 0xe1, + 0xe3, 0x27, 0x43, 0xcb, 0xc7, 0x7d, 0x26, 0xce, 0x86, 0x11, 0x8d, 0xb5, 0x1b, 0x91, 0x4a, 0x55, + 0xf3, 0x54, 0x2a, 0x52, 0x28, 0xd5, 0x03, 0xd6, 0xe6, 0x79, 0xc0, 0x1f, 0x17, 0xa1, 0x7e, 0x87, + 0xb8, 0x21, 0xea, 0x85, 0x11, 0xe3, 0xc5, 0x04, 0xe3, 0x1d, 0x28, 0x0f, 0x7d, 0xa9, 0x58, 0xf4, + 0xaf, 0xb6, 0x0a, 0x55, 0xec, 0x20, 0xcb, 0x16, 0x4f, 0xc3, 0x07, 0x4a, 0x46, 0x2a, 0xf3, 0x30, + 0xf2, 0x08, 0xea, 0x77, 0xf1, 0x00, 0x0d, 0xed, 0x50, 0x7b, 0x00, 0x17, 0x50, 0x64, 0x6f, 0xa6, + 0x17, 0x19, 0x9c, 0x5e, 0x9c, 0x40, 0x70, 0x15, 0x29, 0x4c, 0x74, 0xeb, 0x3b, 0xb0, 0x70, 0x17, + 0x0f, 0x2c, 0x97, 0x41, 0x02, 0xed, 0xe1, 0x64, 0xca, 0x17, 0x33, 0x94, 0x85, 0xb8, 0xd5, 0xc4, + 0xff, 0x58, 0x85, 0xc6, 0x5d, 0xd2, 0x1b, 0x3a, 0xd8, 0x0d, 0x35, 0x1d, 0xea, 0xc1, 0x53, 0x74, + 0x74, 0x84, 0x7d, 0x21, 0x3f, 0x39, 0xd4, 0x5e, 0x86, 0x8a, 0xe5, 0x0e, 0x08, 0x93, 0xe1, 0xc2, + 0x4e, 0x27, 0xb9, 0xc7, 0x03, 0x77, 0x40, 0x0c, 0x06, 0xa5, 0xc2, 0x3f, 0x26, 0x41, 0x28, 0xa4, + 0xca, 0xfe, 0x6b, 0x9b, 0xd0, 0x3c, 0x44, 0x01, 0x36, 0x3d, 0x14, 0x1e, 0x0b, 0xab, 0x6b, 0xd0, + 0x89, 0x7d, 0x14, 0x1e, 0xb3, 0x0d, 0x29, 0x77, 0x38, 0x60, 0x96, 0x46, 0x37, 0xe4, 0x43, 0xaa, + 0x5c, 0x3d, 0xe2, 0x06, 0x43, 0x0a, 0xaa, 0x31, 0x50, 0x34, 0xa6, 0x30, 0xcf, 0x27, 0xfd, 0x61, + 0x0f, 0x07, 0x7a, 0x9d, 0xc3, 0xe4, 0x58, 0x7b, 0x0d, 0xaa, 0x74, 0xa7, 0x40, 0x6f, 0x30, 0x4e, + 0x97, 0x93, 0x9c, 0xd2, 0x2d, 0x03, 0x83, 0xc3, 0xb5, 0xb7, 0xa9, 0x0d, 0x44, 0x52, 0xd5, 0x9b, + 0x0c, 0x3d, 0x25, 0xbc, 0x84, 0xd0, 0x8d, 0x24, 0xae, 0xf6, 0x75, 0x00, 0x4f, 0xda, 0x52, 0xa0, + 0x03, 0x5b, 0x79, 0x25, 
0xbd, 0x91, 0x80, 0x26, 0x49, 0x24, 0xd6, 0x68, 0xef, 0x40, 0xd3, 0xc7, + 0x81, 0x47, 0xdc, 0x00, 0x07, 0xfa, 0x02, 0x23, 0xf0, 0x62, 0x92, 0x80, 0x21, 0x80, 0xc9, 0xf5, + 0xf1, 0x0a, 0xed, 0xab, 0xd0, 0x08, 0x84, 0x53, 0xd1, 0x17, 0xd9, 0x5b, 0x4f, 0xad, 0x96, 0x0e, + 0xc7, 0xe0, 0xd6, 0x48, 0x5f, 0xad, 0x11, 0x2d, 0xd0, 0x0c, 0x58, 0x95, 0xff, 0xcd, 0xa4, 0x04, + 0x5a, 0x59, 0x36, 0x24, 0xa1, 0x24, 0x1b, 0x2b, 0x41, 0x76, 0x52, 0xbb, 0x0a, 0x95, 0x10, 0x1d, + 0x05, 0x7a, 0x9b, 0x31, 0xb3, 0x94, 0xa4, 0xf1, 0x08, 0x1d, 0x19, 0x0c, 0xa8, 0xbd, 0x03, 0x2d, + 0x6a, 0x57, 0x3e, 0x55, 0xdb, 0x3e, 0xe9, 0x05, 0xfa, 0x12, 0xdb, 0x51, 0x4f, 0x62, 0xdf, 0x13, + 0x08, 0x77, 0x49, 0x2f, 0x30, 0x16, 0x71, 0x62, 0xa4, 0xb4, 0xce, 0xce, 0x3c, 0xd6, 0xf9, 0x18, + 0x1a, 0xf7, 0x4e, 0x91, 0xe3, 0xd9, 0x38, 0x78, 0x9e, 0xe6, 0xf9, 0xa3, 0x22, 0x2c, 0x26, 0xd9, + 0x9e, 0xc1, 0xbb, 0x66, 0x1d, 0xd2, 0x99, 0x9d, 0xfc, 0x3f, 0x4a, 0x00, 0xf7, 0x2d, 0x1b, 0x73, + 0x63, 0xd7, 0xd6, 0xa0, 0x36, 0x20, 0xbe, 0x83, 0x42, 0xb1, 0xbd, 0x18, 0x51, 0xc7, 0x17, 0x5a, + 0xa1, 0x2d, 0x1d, 0x3b, 0x1f, 0x8c, 0x73, 0x5c, 0xce, 0x72, 0x7c, 0x1d, 0xea, 0x7d, 0xee, 0xd9, + 0x98, 0x0d, 0x8f, 0xbd, 0x63, 0xca, 0x91, 0x84, 0xa7, 0xc2, 0x02, 0x37, 0xea, 0x38, 0x2c, 0xc8, + 0x08, 0x58, 0x4b, 0x44, 0xc0, 0x4d, 0x6a, 0x0b, 0xa8, 0x6f, 0x12, 0xd7, 0x1e, 0xe9, 0x75, 0x19, + 0x47, 0x50, 0x7f, 0xcf, 0xb5, 0x47, 0x59, 0x9d, 0x69, 0xcc, 0xa5, 0x33, 0xd7, 0xa1, 0x8e, 0xf9, + 0x2b, 0x17, 0x06, 0x9e, 0x65, 0x5b, 0xc0, 0x95, 0x6f, 0x00, 0xe6, 0x79, 0x03, 0x5f, 0xd4, 0x60, + 0xe3, 0x3e, 0xf1, 0x9d, 0xbb, 0x28, 0x44, 0x91, 0x03, 0x38, 0x18, 0x1e, 0x1e, 0xc8, 0xb4, 0x29, + 0x16, 0x4b, 0x71, 0x2c, 0x5a, 0xf2, 0xc8, 0x5a, 0xca, 0xcb, 0x55, 0xca, 0xf9, 0xf1, 0xb9, 0x92, + 0x08, 0x73, 0x37, 0x60, 0x19, 0xd9, 0x36, 0x79, 0x6a, 0x62, 0xc7, 0x0b, 0x47, 0x26, 0x4f, 0xbc, + 0xaa, 0x6c, 0xab, 0x25, 0x06, 0xb8, 0x47, 0xe7, 0x3f, 0x90, 0xc9, 0x56, 0xe6, 0x45, 0xc4, 0x3a, + 0x53, 0x4f, 0xe9, 0xcc, 0xff, 0x43, 0xd5, 0x0a, 0xb1, 0x23, 0x65, 0xbf, 0x99, 0xf2, 0x74, 0xbe, + 0xe5, 0x58, 0xa1, 0x75, 0xc2, 0x33, 0xc9, 0xc0, 0xe0, 0x98, 0xda, 0xeb, 0xb0, 0xdc, 0x23, 0xb6, + 0x8d, 0x7b, 0x94, 0x59, 0x53, 0x50, 0x6d, 0x32, 0xaa, 0x9d, 0x18, 0x70, 0x9f, 0xd3, 0x4f, 0xe8, + 0x16, 0x4c, 0xd1, 0x2d, 0x1d, 0xea, 0x0e, 0x3a, 0xb5, 0x9c, 0xa1, 0xc3, 0xbc, 0x66, 0xd1, 0x90, + 0x43, 0xba, 0x23, 0x3e, 0xed, 0xd9, 0xc3, 0xc0, 0x3a, 0xc1, 0xa6, 0xc4, 0x59, 0x64, 0x0f, 0xdf, + 0x89, 0x00, 0xdf, 0x14, 0xc8, 0x94, 0x8c, 0xe5, 0x32, 0x94, 0x96, 0x20, 0xc3, 0x87, 0x63, 0x64, + 0x04, 0x4e, 0x7b, 0x9c, 0x8c, 0x40, 0x7e, 0x01, 0xc0, 0x41, 0xa7, 0xa6, 0x8d, 0xdd, 0xa3, 0xf0, + 0x98, 0x79, 0xb3, 0xb2, 0xd1, 0x74, 0xd0, 0xe9, 0x43, 0x36, 0xc1, 0xc0, 0x96, 0x2b, 0xc1, 0x1d, + 0x01, 0xb6, 0x5c, 0x01, 0xd6, 0xa1, 0xee, 0xa1, 0x90, 0x2a, 0xab, 0xbe, 0xcc, 0x83, 0xad, 0x18, + 0x52, 0x8b, 0xa0, 0x74, 0xb9, 0xd0, 0x35, 0xb6, 0xae, 0xe1, 0xa0, 0x53, 0x26, 0x61, 0x06, 0xb4, + 0x5c, 0x01, 0x5c, 0x11, 0x40, 0xcb, 0xe5, 0xc0, 0x97, 0x60, 0x71, 0xe8, 0x5a, 0x4f, 0x86, 0x58, + 0xc0, 0x57, 0x19, 0xe7, 0x0b, 0x7c, 0x8e, 0xa3, 0x5c, 0x85, 0x0a, 0x76, 0x87, 0x8e, 0x7e, 0x21, + 0xeb, 0xaa, 0xa9, 0xa8, 0x19, 0x50, 0x7b, 0x11, 0x16, 0x9c, 0xa1, 0x1d, 0x5a, 0x9e, 0x8d, 0x4d, + 0x32, 0xd0, 0xd7, 0x98, 0x90, 0x40, 0x4e, 0xed, 0x0d, 0x94, 0xd6, 0x72, 0x71, 0x2e, 0x6b, 0xa9, + 0x42, 0xad, 0x8b, 0x51, 0x1f, 0xfb, 0xca, 0xb4, 0x38, 0xd6, 0xc5, 0x92, 0x5a, 0x17, 0xcb, 0x67, + 0xd3, 0xc5, 0xca, 0x74, 0x5d, 0xac, 0xce, 0xae, 0x8b, 0xb5, 0x19, 0x74, 0xb1, 0x3e, 0x5d, 0x17, + 0x1b, 0x33, 0xe8, 0x62, 0x73, 0x26, 0x5d, 0x84, 
0xc9, 0xba, 0xb8, 0x30, 0x41, 0x17, 0x17, 0x27, + 0xe8, 0x62, 0x6b, 0x92, 0x2e, 0xb6, 0xa7, 0xe8, 0xe2, 0x52, 0xbe, 0x2e, 0x76, 0xe6, 0xd0, 0xc5, + 0xe5, 0x8c, 0x2e, 0x8e, 0x79, 0x4b, 0x6d, 0xb6, 0x23, 0xd4, 0xca, 0x3c, 0xda, 0xfa, 0xb7, 0x2a, + 0xe8, 0x5c, 0x5b, 0xff, 0x2d, 0x9e, 0x5d, 0x5a, 0x48, 0x55, 0x69, 0x21, 0x35, 0xb5, 0x85, 0xd4, + 0xcf, 0x66, 0x21, 0x8d, 0xe9, 0x16, 0xd2, 0x9c, 0xdd, 0x42, 0x60, 0x06, 0x0b, 0x59, 0x98, 0x6e, + 0x21, 0x8b, 0x33, 0x58, 0x48, 0x6b, 0x26, 0x0b, 0x69, 0x4f, 0xb6, 0x90, 0xa5, 0x09, 0x16, 0xd2, + 0x99, 0x60, 0x21, 0xcb, 0x93, 0x2c, 0x44, 0x9b, 0x62, 0x21, 0x2b, 0xf9, 0x16, 0xb2, 0x3a, 0x87, + 0x85, 0x5c, 0x98, 0xc9, 0x5b, 0xaf, 0xcd, 0xa3, 0xff, 0xdf, 0x82, 0x3a, 0x57, 0xff, 0x67, 0x38, + 0x7e, 0xf2, 0x85, 0x39, 0xc9, 0xf3, 0xe7, 0x25, 0xa8, 0xd0, 0x03, 0x64, 0x9c, 0x98, 0x16, 0x93, + 0x89, 0xa9, 0x0e, 0xf5, 0x13, 0xec, 0x07, 0x71, 0x65, 0x44, 0x0e, 0x67, 0x30, 0xa4, 0x6b, 0xd0, + 0x09, 0xb1, 0xef, 0x04, 0x26, 0x19, 0x98, 0x01, 0xf6, 0x4f, 0xac, 0x9e, 0x34, 0xaa, 0x36, 0x9b, + 0xdf, 0x1b, 0x1c, 0xf0, 0x59, 0xed, 0x26, 0xd4, 0x7b, 0xbc, 0x7c, 0x20, 0x9c, 0xfe, 0x4a, 0xf2, + 0x21, 0x44, 0x65, 0xc1, 0x90, 0x38, 0x14, 0xdd, 0xb6, 0x7a, 0xd8, 0x0d, 0x78, 0xfa, 0x34, 0x86, + 0xfe, 0x90, 0x83, 0x0c, 0x89, 0xa3, 0x14, 0x7e, 0x7d, 0x1e, 0xe1, 0xbf, 0x05, 0x4d, 0xa6, 0x0c, + 0xac, 0x56, 0x77, 0x23, 0x51, 0xab, 0x2b, 0x4f, 0x2e, 0xac, 0x6c, 0xdd, 0x85, 0xd6, 0x37, 0x02, + 0xe2, 0x1a, 0x78, 0x80, 0x7d, 0xec, 0xf6, 0xb0, 0xb6, 0x0c, 0x15, 0xd3, 0xc7, 0x03, 0x21, 0xe3, + 0xb2, 0x81, 0x07, 0xd3, 0xeb, 0x4f, 0x5b, 0x1e, 0xd4, 0xc5, 0x33, 0xcd, 0x58, 0x5c, 0x39, 0xf3, + 0x59, 0xe6, 0x1e, 0x34, 0x24, 0x50, 0xb9, 0xe5, 0x2b, 0xb2, 0xaa, 0x58, 0x52, 0x3b, 0x20, 0x0e, + 0xdd, 0x7a, 0x17, 0x16, 0x12, 0x0a, 0xa8, 0xa4, 0x74, 0x2d, 0x4d, 0x29, 0x25, 0x4c, 0xa1, 0xb7, + 0x82, 0xd8, 0xfb, 0xd0, 0x66, 0xc4, 0xe2, 0x22, 0x9a, 0x8a, 0xde, 0xeb, 0x69, 0x7a, 0x17, 0x94, + 0x45, 0x01, 0x49, 0x72, 0x0f, 0x5a, 0x82, 0x64, 0x78, 0xcc, 0xde, 0xad, 0x8a, 0xe2, 0x8d, 0x34, + 0xc5, 0xd5, 0xf1, 0x7a, 0x06, 0x5d, 0x38, 0x4e, 0x50, 0x56, 0x0f, 0xe6, 0x26, 0x28, 0x17, 0x4a, + 0x82, 0x1f, 0x81, 0x96, 0x22, 0x18, 0x9d, 0x1d, 0x32, 0x54, 0x6f, 0xa5, 0xa9, 0xae, 0xab, 0xa8, + 0xb2, 0xd5, 0xe3, 0x2f, 0x47, 0xc4, 0xd0, 0x79, 0x5f, 0x8e, 0xd0, 0x74, 0x41, 0xcc, 0x81, 0x4b, + 0x9c, 0x58, 0xb6, 0x34, 0x91, 0x2b, 0xd8, 0xb7, 0xd3, 0xd4, 0xaf, 0x4e, 0xa9, 0x7b, 0x24, 0xe5, + 0xfc, 0x96, 0xe4, 0x3d, 0xf4, 0x2d, 0xf7, 0x48, 0x49, 0x7d, 0x35, 0x49, 0xbd, 0x29, 0x17, 0x3e, + 0x86, 0x4e, 0x62, 0xe1, 0xae, 0xef, 0x23, 0xb5, 0x82, 0xdf, 0x4c, 0xf3, 0x96, 0xf2, 0xa9, 0x89, + 0xb5, 0x92, 0xec, 0x6f, 0xca, 0xd0, 0x79, 0x8f, 0xb8, 0xe9, 0x1a, 0x2f, 0x86, 0xcd, 0x63, 0xa6, + 0xc1, 0x66, 0x54, 0x77, 0x32, 0x83, 0xe1, 0xa1, 0x99, 0xaa, 0xf4, 0xbf, 0x9c, 0x55, 0xf8, 0x6c, + 0x82, 0xd3, 0x2d, 0x18, 0xfa, 0x71, 0x5e, 0xf2, 0x63, 0xc3, 0x65, 0x9a, 0x30, 0x98, 0x7d, 0x14, + 0x22, 0xf5, 0x4e, 0xfc, 0x19, 0x5e, 0x4d, 0xee, 0x94, 0x7f, 0x4c, 0xee, 0x16, 0x8c, 0x8d, 0x41, + 0xfe, 0x21, 0xfa, 0x10, 0x36, 0x9e, 0x0c, 0xb1, 0x3f, 0x52, 0xef, 0x54, 0xce, 0xbe, 0xc9, 0xf7, + 0x29, 0xb6, 0x72, 0x9b, 0x8b, 0x4f, 0xd4, 0x20, 0xcd, 0x84, 0x75, 0x0f, 0x85, 0xc7, 0xea, 0x2d, + 0x78, 0xf1, 0x63, 0x6b, 0xdc, 0x0a, 0x95, 0x3b, 0xac, 0x79, 0x4a, 0x48, 0xdc, 0x24, 0xf9, 0xbc, + 0x04, 0xfa, 0x1e, 0x1a, 0x86, 0xc7, 0x3b, 0xbb, 0xbd, 0x1e, 0x0e, 0x82, 0x3b, 0xa4, 0x8f, 0xa7, + 0xf5, 0x39, 0x06, 0x36, 0x79, 0x2a, 0xab, 0xf2, 0xf4, 0xbf, 0xf6, 0x06, 0x0d, 0x08, 0xc4, 0xc3, + 0xf2, 0x48, 0x94, 0x2a, 0x8d, 0x70, 0xea, 0x07, 0x0c, 0x6e, 0x08, 0x3c, 
0x9a, 0x35, 0xd1, 0x69, + 0xe2, 0x5b, 0xdf, 0x67, 0xfd, 0x09, 0x93, 0xfa, 0x6f, 0x71, 0x20, 0x4a, 0x01, 0x1e, 0xfb, 0x36, + 0x4d, 0x60, 0x42, 0xf2, 0x29, 0xe6, 0x48, 0x3c, 0xff, 0x6c, 0xb0, 0x09, 0x0a, 0x1c, 0x0b, 0x1e, + 0xb5, 0xd9, 0x32, 0xef, 0xb9, 0x82, 0xdf, 0x5f, 0x8a, 0xb0, 0x2e, 0x64, 0xe4, 0x79, 0xf6, 0x2c, + 0x1d, 0x95, 0xe7, 0x23, 0xa4, 0xd4, 0x73, 0x57, 0x26, 0x3f, 0x77, 0x75, 0xb6, 0xe7, 0x9e, 0xab, + 0xa7, 0xf1, 0xc3, 0x12, 0xac, 0x71, 0xc6, 0x1e, 0x38, 0xf4, 0xb9, 0xad, 0xf0, 0x3f, 0x4d, 0x33, + 0xfe, 0x05, 0x42, 0xf8, 0x73, 0x51, 0x0a, 0x61, 0x1f, 0x05, 0xc1, 0x53, 0xe2, 0xf7, 0xff, 0x07, + 0xde, 0xfc, 0xc7, 0xb0, 0x98, 0xe4, 0xeb, 0x19, 0xfa, 0x3d, 0x2c, 0x42, 0xe4, 0x24, 0xdc, 0x3f, + 0xaf, 0x40, 0x73, 0xcf, 0xc3, 0x3e, 0x92, 0x87, 0x4d, 0x56, 0xb7, 0x2f, 0xb2, 0x3a, 0x2d, 0x2f, + 0xd3, 0xeb, 0x50, 0x0f, 0x86, 0x8e, 0x83, 0xfc, 0x91, 0xcc, 0xb9, 0xc5, 0x70, 0x86, 0x9c, 0x3b, + 0x53, 0xae, 0xad, 0xcc, 0x55, 0xae, 0x7d, 0x09, 0x16, 0x89, 0xe4, 0xcd, 0xb4, 0xfa, 0x52, 0xbc, + 0xd1, 0xdc, 0x83, 0x7e, 0xaa, 0xf7, 0x53, 0x1b, 0xeb, 0xfd, 0x24, 0x7b, 0x46, 0xf5, 0xb1, 0x9e, + 0xd1, 0x57, 0x52, 0x3d, 0x9b, 0x06, 0x13, 0xdd, 0x86, 0x32, 0x3d, 0xe3, 0xa1, 0x3e, 0xd9, 0xad, + 0x79, 0x33, 0xd9, 0xad, 0x69, 0x66, 0x33, 0x3b, 0x99, 0xe0, 0xa4, 0x7a, 0x34, 0x89, 0xd6, 0x16, + 0xa4, 0x5b, 0x5b, 0x97, 0x01, 0xfa, 0xd8, 0xf3, 0x71, 0x0f, 0x85, 0xb8, 0x2f, 0x4e, 0xbd, 0x89, + 0x99, 0xb3, 0x75, 0x77, 0x54, 0xea, 0xd7, 0x9a, 0x47, 0xfd, 0x7e, 0x59, 0x84, 0x66, 0x9c, 0x45, + 0xdc, 0x86, 0xf6, 0x21, 0xe9, 0x27, 0xe2, 0xad, 0x48, 0x1c, 0x52, 0x09, 0x5e, 0x2a, 0xf1, 0xe8, + 0x16, 0x8c, 0xd6, 0x61, 0x2a, 0x13, 0x79, 0x08, 0x9a, 0x4b, 0x5c, 0x73, 0x8c, 0x0e, 0x4f, 0x0b, + 0x2e, 0xa5, 0x98, 0x1a, 0xcb, 0x61, 0xba, 0x05, 0xa3, 0xe3, 0x8e, 0xcd, 0xc5, 0xd1, 0xf3, 0x08, + 0x56, 0x55, 0x7d, 0x36, 0x6d, 0x6f, 0xb2, 0xbd, 0x6c, 0x64, 0xc4, 0x10, 0x27, 0xe6, 0x6a, 0x93, + 0xf9, 0xac, 0x08, 0xed, 0xb4, 0x76, 0x68, 0x5f, 0x82, 0xe6, 0xb8, 0x44, 0xd4, 0xb9, 0x7e, 0xb7, + 0x60, 0xc4, 0x98, 0x54, 0x9a, 0x9f, 0x04, 0xc4, 0xa5, 0x67, 0x30, 0x7e, 0x22, 0x53, 0xa5, 0xcb, + 0xa9, 0x23, 0x1b, 0x95, 0xe6, 0x27, 0xc9, 0x89, 0xf8, 0xf9, 0x7f, 0x5f, 0x86, 0x46, 0x74, 0x74, + 0x50, 0x9c, 0xec, 0x5e, 0x83, 0xf2, 0x11, 0x0e, 0x55, 0x27, 0x91, 0xc8, 0xfe, 0x0d, 0x8a, 0x41, + 0x11, 0xbd, 0x61, 0x28, 0xfc, 0x63, 0x1e, 0xa2, 0x37, 0x0c, 0xb5, 0xeb, 0x50, 0xf1, 0x48, 0x20, + 0x3b, 0x40, 0x39, 0x98, 0x0c, 0x45, 0xbb, 0x09, 0xb5, 0x3e, 0xb6, 0x71, 0x88, 0xc5, 0x89, 0x3a, + 0x07, 0x59, 0x20, 0x69, 0xb7, 0xa0, 0x4e, 0x3c, 0xde, 0x86, 0xac, 0x4d, 0xc2, 0x97, 0x58, 0x94, + 0x15, 0x9a, 0x92, 0x8a, 0x22, 0x57, 0x1e, 0x2b, 0x14, 0x85, 0x9e, 0xc9, 0x3c, 0x14, 0xf6, 0x8e, + 0x45, 0xfb, 0x22, 0x07, 0x97, 0xe3, 0x8c, 0xb9, 0x89, 0xe6, 0x5c, 0x6e, 0xe2, 0xcc, 0x1d, 0xa4, + 0xbf, 0x56, 0x61, 0x4d, 0x9d, 0x4d, 0x9e, 0xd7, 0x18, 0xcf, 0x6b, 0x8c, 0xff, 0xed, 0x35, 0xc6, + 0xa7, 0x50, 0x65, 0x17, 0x34, 0x94, 0x94, 0x8a, 0x73, 0x50, 0xd2, 0x6e, 0x42, 0x85, 0xdd, 0x36, + 0x29, 0xb1, 0x45, 0xeb, 0x0a, 0x87, 0x2f, 0xea, 0x26, 0x0c, 0x6d, 0xeb, 0x67, 0x55, 0x58, 0x1a, + 0xd3, 0xda, 0xf3, 0x9e, 0xd4, 0x79, 0x4f, 0xea, 0x4c, 0x3d, 0x29, 0x95, 0x0e, 0x6b, 0xf3, 0x58, + 0xc3, 0xb7, 0x01, 0xe2, 0x14, 0xe4, 0x39, 0xdf, 0xf9, 0xfa, 0x55, 0x0d, 0x2e, 0xe6, 0x14, 0x46, + 0xce, 0xaf, 0x29, 0x9c, 0x5f, 0x53, 0x38, 0xbf, 0xa6, 0x10, 0x9b, 0xe1, 0xdf, 0x8b, 0xd0, 0x88, + 0xca, 0xe9, 0xd3, 0x2f, 0x76, 0x6d, 0x47, 0xdd, 0x19, 0x9e, 0x76, 0xaf, 0x65, 0x6b, 0xd6, 0x2c, + 0xf0, 0xc8, 0xab, 0xaf, 0x37, 0xa1, 0xce, 0x2b, 0xab, 0x32, 0x78, 0xac, 0x64, 0x0b, 0xb2, 0x81, + 
0x21, 0x71, 0xb4, 0x37, 0xa0, 0x21, 0xae, 0x2b, 0xc9, 0x93, 0xf5, 0x6a, 0xfa, 0x64, 0xcd, 0x61, + 0x46, 0x84, 0x75, 0xf6, 0x3b, 0xcd, 0x18, 0x56, 0x14, 0x97, 0x11, 0xb5, 0xf7, 0x26, 0x3b, 0xa4, + 0x6c, 0xcc, 0x8d, 0x5a, 0x0b, 0x6a, 0x97, 0xf4, 0x93, 0x22, 0xb4, 0xd2, 0x5d, 0x86, 0x1d, 0xea, + 0x88, 0xf8, 0x44, 0x74, 0x7b, 0x5c, 0x71, 0xe6, 0xee, 0x16, 0x8c, 0x08, 0xef, 0xf9, 0x9e, 0xaf, + 0x7e, 0x5a, 0x84, 0x66, 0x74, 0xb2, 0xd7, 0xee, 0x40, 0x4b, 0x6e, 0x63, 0xf6, 0x48, 0x1f, 0x8b, + 0x07, 0xbd, 0x9c, 0xfb, 0xa0, 0xbc, 0xdb, 0xb1, 0x28, 0x17, 0xdd, 0x21, 0x7d, 0x75, 0x2b, 0xb0, + 0x34, 0xcf, 0xdb, 0xf8, 0x75, 0x13, 0x6a, 0xc2, 0x51, 0x2b, 0x4e, 0x7c, 0x79, 0x09, 0x4a, 0xd4, + 0x5b, 0x2d, 0x4f, 0xb8, 0xf4, 0x57, 0x99, 0x78, 0xe9, 0x6f, 0x5a, 0xe2, 0x31, 0x66, 0x89, 0xb5, + 0x8c, 0x25, 0x26, 0x5c, 0x62, 0x7d, 0x06, 0x97, 0xd8, 0x98, 0xee, 0x12, 0x9b, 0x33, 0xb8, 0x44, + 0x98, 0xc9, 0x25, 0x2e, 0x4c, 0x76, 0x89, 0x8b, 0x13, 0x5c, 0x62, 0x6b, 0x82, 0x4b, 0x6c, 0x4f, + 0x72, 0x89, 0x4b, 0x53, 0x5c, 0x62, 0x27, 0xeb, 0x12, 0x5f, 0x81, 0x36, 0x25, 0x9e, 0x30, 0x36, + 0x7e, 0x12, 0x68, 0x39, 0xe8, 0x34, 0x91, 0x2b, 0x50, 0x34, 0xcb, 0x4d, 0xa2, 0x69, 0x02, 0xcd, + 0x72, 0x13, 0x68, 0xc9, 0x40, 0xbf, 0x32, 0x76, 0x4d, 0x73, 0xa6, 0x13, 0xc1, 0x47, 0x79, 0x2e, + 0xe0, 0x42, 0xb6, 0xb5, 0x94, 0xf7, 0xe9, 0x89, 0xda, 0x1b, 0x68, 0xd7, 0x44, 0xd8, 0x5f, 0xcb, + 0xda, 0xfd, 0xa3, 0x91, 0x87, 0x79, 0xee, 0xce, 0x92, 0x81, 0xd7, 0x65, 0xd0, 0xbf, 0x98, 0x3d, + 0xdc, 0x47, 0x4d, 0x73, 0x19, 0xee, 0xaf, 0x43, 0x0d, 0xd9, 0x36, 0xd5, 0x4f, 0x3d, 0xb7, 0x77, + 0x5e, 0x45, 0xb6, 0xbd, 0x37, 0xd0, 0xbe, 0x0c, 0x90, 0x78, 0xa2, 0xf5, 0xac, 0x33, 0x8f, 0xb9, + 0x35, 0x12, 0x98, 0xda, 0xcb, 0xd0, 0xea, 0x5b, 0xd4, 0x82, 0x1c, 0xcb, 0x45, 0x21, 0xf1, 0xf5, + 0x0d, 0xa6, 0x20, 0xe9, 0xc9, 0xf4, 0x95, 0xd7, 0xcd, 0xb1, 0x2b, 0xaf, 0x2f, 0x41, 0xf9, 0xd4, + 0xb1, 0xf5, 0x4b, 0x59, 0x8b, 0xfb, 0xd0, 0xb1, 0x0d, 0x0a, 0xcb, 0x96, 0x59, 0x5f, 0x78, 0xd6, + 0x5b, 0xb1, 0x97, 0x9f, 0xe1, 0x56, 0xec, 0x8b, 0xf3, 0x78, 0xac, 0x1f, 0x00, 0xc4, 0x71, 0x6f, + 0xce, 0x2f, 0x8d, 0xde, 0x86, 0x85, 0x81, 0x65, 0x63, 0x33, 0x3f, 0xa4, 0xc6, 0x37, 0x9e, 0xbb, + 0x05, 0x03, 0x06, 0xd1, 0x28, 0xf6, 0xe2, 0x21, 0xac, 0x28, 0xba, 0xb9, 0xda, 0x77, 0x27, 0xc7, + 0xaf, 0x6b, 0xd9, 0x84, 0x3a, 0xa7, 0x25, 0xac, 0x0e, 0x67, 0x7f, 0xaa, 0xc0, 0xc5, 0xbc, 0x66, + 0xb4, 0x03, 0x2f, 0x1c, 0xa2, 0xc0, 0xea, 0x99, 0x28, 0xf5, 0x95, 0x90, 0x19, 0xd5, 0x7c, 0xb9, + 0x68, 0x5e, 0x4b, 0x55, 0x58, 0xf3, 0xbf, 0x2a, 0xea, 0x16, 0x8c, 0xcd, 0xc3, 0x09, 0x1f, 0x1d, + 0xdd, 0x87, 0x0e, 0xf2, 0x2c, 0xf3, 0x53, 0x3c, 0x8a, 0x77, 0xe0, 0x92, 0x4c, 0xd5, 0xb5, 0xd2, + 0x5f, 0x59, 0x75, 0x0b, 0x46, 0x1b, 0xa5, 0xbf, 0xbb, 0xfa, 0x1e, 0xe8, 0x84, 0xb5, 0x25, 0x4c, + 0x4b, 0x34, 0xa4, 0x62, 0x7a, 0xe5, 0x6c, 0x57, 0x54, 0xdd, 0xbb, 0xea, 0x16, 0x8c, 0x35, 0xa2, + 0xee, 0x6a, 0xc5, 0xf4, 0x3d, 0xd1, 0xeb, 0x89, 0xe9, 0x57, 0xf2, 0xe8, 0x8f, 0xb7, 0x85, 0x62, + 0xfa, 0x99, 0x86, 0xd1, 0x11, 0x6c, 0x0a, 0xfa, 0x28, 0x6e, 0x24, 0xc6, 0x5b, 0xf0, 0x00, 0xf7, + 0x4a, 0x76, 0x0b, 0x45, 0xdb, 0xb1, 0x5b, 0x30, 0xd6, 0x49, 0x6e, 0x4f, 0x12, 0xc7, 0x1b, 0xb1, + 0xae, 0x2e, 0x4b, 0x17, 0xe2, 0x8d, 0x6a, 0x59, 0xef, 0x98, 0xd7, 0x03, 0xee, 0x16, 0x0c, 0x21, + 0x93, 0x2c, 0x2c, 0xd6, 0xf0, 0xe3, 0x58, 0xc3, 0x13, 0x2d, 0x01, 0xed, 0xfd, 0xc9, 0x1a, 0x7e, + 0x29, 0xa7, 0x6d, 0xc4, 0x2f, 0x16, 0xa8, 0xb5, 0xfa, 0x2a, 0x2c, 0x24, 0x6f, 0x2e, 0xac, 0xc6, + 0x1f, 0xf7, 0x95, 0xe3, 0x3b, 0x0e, 0xbf, 0x2d, 0x42, 0xf9, 0x11, 0x52, 0xdf, 0x8a, 0x98, 0xfe, + 0xb1, 0x5b, 0xc6, 0xb3, 
0x95, 0xcf, 0xfc, 0x8d, 0xc8, 0x5c, 0x5f, 0x70, 0x5d, 0x81, 0x86, 0x8c, + 0x30, 0x39, 0xcf, 0xf7, 0x31, 0x2c, 0x7d, 0x30, 0x56, 0x6f, 0x7a, 0x8e, 0x1f, 0x93, 0xfc, 0xae, + 0x08, 0xe5, 0x0f, 0x1d, 0x5b, 0x29, 0xbd, 0x4b, 0xd0, 0xa4, 0xbf, 0x81, 0x87, 0x7a, 0xf2, 0x5e, + 0x49, 0x3c, 0x41, 0x93, 0x3f, 0xcf, 0xc7, 0x03, 0xeb, 0x54, 0x64, 0x79, 0x62, 0x44, 0x57, 0xa1, + 0x30, 0xf4, 0xad, 0xc3, 0x61, 0x88, 0xc5, 0x67, 0x7a, 0xf1, 0x04, 0x4d, 0x65, 0x9e, 0xfa, 0xc8, + 0xf3, 0x70, 0x5f, 0x1c, 0xc1, 0xe5, 0xf0, 0xcc, 0x7d, 0xcc, 0xdb, 0xaf, 0x42, 0x9b, 0xf8, 0x47, + 0x12, 0xd7, 0x3c, 0xd9, 0xb9, 0xbd, 0x28, 0xbe, 0x5d, 0xdd, 0xf7, 0x49, 0x48, 0xf6, 0x8b, 0xbf, + 0x28, 0x95, 0xf7, 0x76, 0x0f, 0x0e, 0x6b, 0xec, 0x63, 0xd0, 0x37, 0xff, 0x19, 0x00, 0x00, 0xff, + 0xff, 0xd4, 0x0a, 0xef, 0xca, 0xe4, 0x3a, 0x00, 0x00, +} diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto b/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto new file mode 100644 index 000000000..557c88072 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto @@ -0,0 +1,663 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +syntax = "proto3"; + +package openapi.v2; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIProto"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapi_v2"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +message AdditionalPropertiesItem { + oneof oneof { + Schema schema = 1; + bool boolean = 2; + } +} + +message Any { + google.protobuf.Any value = 1; + string yaml = 2; +} + +message ApiKeySecurity { + string type = 1; + string name = 2; + string in = 3; + string description = 4; + repeated NamedAny vendor_extension = 5; +} + +message BasicAuthenticationSecurity { + string type = 1; + string description = 2; + repeated NamedAny vendor_extension = 3; +} + +message BodyParameter { + // A brief description of the parameter. This could contain examples of use. 
GitHub Flavored Markdown is allowed. + string description = 1; + // The name of the parameter. + string name = 2; + // Determines the location of the parameter. + string in = 3; + // Determines whether or not this parameter is required or optional. + bool required = 4; + Schema schema = 5; + repeated NamedAny vendor_extension = 6; +} + +// Contact information for the owners of the API. +message Contact { + // The identifying name of the contact person/organization. + string name = 1; + // The URL pointing to the contact information. + string url = 2; + // The email address of the contact person/organization. + string email = 3; + repeated NamedAny vendor_extension = 4; +} + +message Default { + repeated NamedAny additional_properties = 1; +} + +// One or more JSON objects describing the schemas being consumed and produced by the API. +message Definitions { + repeated NamedSchema additional_properties = 1; +} + +message Document { + // The Swagger version of this document. + string swagger = 1; + Info info = 2; + // The host (name or ip) of the API. Example: 'swagger.io' + string host = 3; + // The base path to the API. Example: '/api'. + string base_path = 4; + // The transfer protocol of the API. + repeated string schemes = 5; + // A list of MIME types accepted by the API. + repeated string consumes = 6; + // A list of MIME types the API can produce. + repeated string produces = 7; + Paths paths = 8; + Definitions definitions = 9; + ParameterDefinitions parameters = 10; + ResponseDefinitions responses = 11; + repeated SecurityRequirement security = 12; + SecurityDefinitions security_definitions = 13; + repeated Tag tags = 14; + ExternalDocs external_docs = 15; + repeated NamedAny vendor_extension = 16; +} + +message Examples { + repeated NamedAny additional_properties = 1; +} + +// information about external documentation +message ExternalDocs { + string description = 1; + string url = 2; + repeated NamedAny vendor_extension = 3; +} + +// A deterministic version of a JSON Schema object. +message FileSchema { + string format = 1; + string title = 2; + string description = 3; + Any default = 4; + repeated string required = 5; + string type = 6; + bool read_only = 7; + ExternalDocs external_docs = 8; + Any example = 9; + repeated NamedAny vendor_extension = 10; +} + +message FormDataParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + // allows sending a parameter by name only or with an empty value. 
+ bool allow_empty_value = 5; + string type = 6; + string format = 7; + PrimitivesItems items = 8; + string collection_format = 9; + Any default = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + repeated Any enum = 21; + double multiple_of = 22; + repeated NamedAny vendor_extension = 23; +} + +message Header { + string type = 1; + string format = 2; + PrimitivesItems items = 3; + string collection_format = 4; + Any default = 5; + double maximum = 6; + bool exclusive_maximum = 7; + double minimum = 8; + bool exclusive_minimum = 9; + int64 max_length = 10; + int64 min_length = 11; + string pattern = 12; + int64 max_items = 13; + int64 min_items = 14; + bool unique_items = 15; + repeated Any enum = 16; + double multiple_of = 17; + string description = 18; + repeated NamedAny vendor_extension = 19; +} + +message HeaderParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + string type = 5; + string format = 6; + PrimitivesItems items = 7; + string collection_format = 8; + Any default = 9; + double maximum = 10; + bool exclusive_maximum = 11; + double minimum = 12; + bool exclusive_minimum = 13; + int64 max_length = 14; + int64 min_length = 15; + string pattern = 16; + int64 max_items = 17; + int64 min_items = 18; + bool unique_items = 19; + repeated Any enum = 20; + double multiple_of = 21; + repeated NamedAny vendor_extension = 22; +} + +message Headers { + repeated NamedHeader additional_properties = 1; +} + +// General information about the API. +message Info { + // A unique and precise title of the API. + string title = 1; + // A semantic version number of the API. + string version = 2; + // A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed. + string description = 3; + // The terms of service for the API. + string terms_of_service = 4; + Contact contact = 5; + License license = 6; + repeated NamedAny vendor_extension = 7; +} + +message ItemsItem { + repeated Schema schema = 1; +} + +message JsonReference { + string _ref = 1; + string description = 2; +} + +message License { + // The name of the license type. It's encouraged to use an OSI compatible license. + string name = 1; + // The URL pointing to the license. + string url = 2; + repeated NamedAny vendor_extension = 3; +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +message NamedAny { + // Map key + string name = 1; + // Mapped value + Any value = 2; +} + +// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +message NamedHeader { + // Map key + string name = 1; + // Mapped value + Header value = 2; +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +message NamedParameter { + // Map key + string name = 1; + // Mapped value + Parameter value = 2; +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. 
+message NamedPathItem { + // Map key + string name = 1; + // Mapped value + PathItem value = 2; +} + +// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +message NamedResponse { + // Map key + string name = 1; + // Mapped value + Response value = 2; +} + +// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. +message NamedResponseValue { + // Map key + string name = 1; + // Mapped value + ResponseValue value = 2; +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +message NamedSchema { + // Map key + string name = 1; + // Mapped value + Schema value = 2; +} + +// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. +message NamedSecurityDefinitionsItem { + // Map key + string name = 1; + // Mapped value + SecurityDefinitionsItem value = 2; +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +message NamedString { + // Map key + string name = 1; + // Mapped value + string value = 2; +} + +// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. +message NamedStringArray { + // Map key + string name = 1; + // Mapped value + StringArray value = 2; +} + +message NonBodyParameter { + oneof oneof { + HeaderParameterSubSchema header_parameter_sub_schema = 1; + FormDataParameterSubSchema form_data_parameter_sub_schema = 2; + QueryParameterSubSchema query_parameter_sub_schema = 3; + PathParameterSubSchema path_parameter_sub_schema = 4; + } +} + +message Oauth2AccessCodeSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string authorization_url = 4; + string token_url = 5; + string description = 6; + repeated NamedAny vendor_extension = 7; +} + +message Oauth2ApplicationSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string token_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2ImplicitSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string authorization_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2PasswordSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string token_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2Scopes { + repeated NamedString additional_properties = 1; +} + +message Operation { + repeated string tags = 1; + // A brief summary of the operation. + string summary = 2; + // A longer description of the operation, GitHub Flavored Markdown is allowed. + string description = 3; + ExternalDocs external_docs = 4; + // A unique identifier of the operation. + string operation_id = 5; + // A list of MIME types the API can produce. + repeated string produces = 6; + // A list of MIME types the API can consume. + repeated string consumes = 7; + // The parameters needed to send a valid API call. + repeated ParametersItem parameters = 8; + Responses responses = 9; + // The transfer protocol of the API. 
+ repeated string schemes = 10; + bool deprecated = 11; + repeated SecurityRequirement security = 12; + repeated NamedAny vendor_extension = 13; +} + +message Parameter { + oneof oneof { + BodyParameter body_parameter = 1; + NonBodyParameter non_body_parameter = 2; + } +} + +// One or more JSON representations for parameters +message ParameterDefinitions { + repeated NamedParameter additional_properties = 1; +} + +message ParametersItem { + oneof oneof { + Parameter parameter = 1; + JsonReference json_reference = 2; + } +} + +message PathItem { + string _ref = 1; + Operation get = 2; + Operation put = 3; + Operation post = 4; + Operation delete = 5; + Operation options = 6; + Operation head = 7; + Operation patch = 8; + // The parameters needed to send a valid API call. + repeated ParametersItem parameters = 9; + repeated NamedAny vendor_extension = 10; +} + +message PathParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + string type = 5; + string format = 6; + PrimitivesItems items = 7; + string collection_format = 8; + Any default = 9; + double maximum = 10; + bool exclusive_maximum = 11; + double minimum = 12; + bool exclusive_minimum = 13; + int64 max_length = 14; + int64 min_length = 15; + string pattern = 16; + int64 max_items = 17; + int64 min_items = 18; + bool unique_items = 19; + repeated Any enum = 20; + double multiple_of = 21; + repeated NamedAny vendor_extension = 22; +} + +// Relative paths to the individual endpoints. They must be relative to the 'basePath'. +message Paths { + repeated NamedAny vendor_extension = 1; + repeated NamedPathItem path = 2; +} + +message PrimitivesItems { + string type = 1; + string format = 2; + PrimitivesItems items = 3; + string collection_format = 4; + Any default = 5; + double maximum = 6; + bool exclusive_maximum = 7; + double minimum = 8; + bool exclusive_minimum = 9; + int64 max_length = 10; + int64 min_length = 11; + string pattern = 12; + int64 max_items = 13; + int64 min_items = 14; + bool unique_items = 15; + repeated Any enum = 16; + double multiple_of = 17; + repeated NamedAny vendor_extension = 18; +} + +message Properties { + repeated NamedSchema additional_properties = 1; +} + +message QueryParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + // allows sending a parameter by name only or with an empty value. 
+ bool allow_empty_value = 5; + string type = 6; + string format = 7; + PrimitivesItems items = 8; + string collection_format = 9; + Any default = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + repeated Any enum = 21; + double multiple_of = 22; + repeated NamedAny vendor_extension = 23; +} + +message Response { + string description = 1; + SchemaItem schema = 2; + Headers headers = 3; + Examples examples = 4; + repeated NamedAny vendor_extension = 5; +} + +// One or more JSON representations for parameters +message ResponseDefinitions { + repeated NamedResponse additional_properties = 1; +} + +message ResponseValue { + oneof oneof { + Response response = 1; + JsonReference json_reference = 2; + } +} + +// Response objects names can either be any valid HTTP status code or 'default'. +message Responses { + repeated NamedResponseValue response_code = 1; + repeated NamedAny vendor_extension = 2; +} + +// A deterministic version of a JSON Schema object. +message Schema { + string _ref = 1; + string format = 2; + string title = 3; + string description = 4; + Any default = 5; + double multiple_of = 6; + double maximum = 7; + bool exclusive_maximum = 8; + double minimum = 9; + bool exclusive_minimum = 10; + int64 max_length = 11; + int64 min_length = 12; + string pattern = 13; + int64 max_items = 14; + int64 min_items = 15; + bool unique_items = 16; + int64 max_properties = 17; + int64 min_properties = 18; + repeated string required = 19; + repeated Any enum = 20; + AdditionalPropertiesItem additional_properties = 21; + TypeItem type = 22; + ItemsItem items = 23; + repeated Schema all_of = 24; + Properties properties = 25; + string discriminator = 26; + bool read_only = 27; + Xml xml = 28; + ExternalDocs external_docs = 29; + Any example = 30; + repeated NamedAny vendor_extension = 31; +} + +message SchemaItem { + oneof oneof { + Schema schema = 1; + FileSchema file_schema = 2; + } +} + +message SecurityDefinitions { + repeated NamedSecurityDefinitionsItem additional_properties = 1; +} + +message SecurityDefinitionsItem { + oneof oneof { + BasicAuthenticationSecurity basic_authentication_security = 1; + ApiKeySecurity api_key_security = 2; + Oauth2ImplicitSecurity oauth2_implicit_security = 3; + Oauth2PasswordSecurity oauth2_password_security = 4; + Oauth2ApplicationSecurity oauth2_application_security = 5; + Oauth2AccessCodeSecurity oauth2_access_code_security = 6; + } +} + +message SecurityRequirement { + repeated NamedStringArray additional_properties = 1; +} + +message StringArray { + repeated string value = 1; +} + +message Tag { + string name = 1; + string description = 2; + ExternalDocs external_docs = 3; + repeated NamedAny vendor_extension = 4; +} + +message TypeItem { + repeated string value = 1; +} + +// Any property starting with x- is valid. 
+message VendorExtension { + repeated NamedAny additional_properties = 1; +} + +message Xml { + string name = 1; + string namespace = 2; + string prefix = 3; + bool attribute = 4; + bool wrapped = 5; + repeated NamedAny vendor_extension = 6; +} + diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv2/README.md b/vendor/github.com/googleapis/gnostic/OpenAPIv2/README.md new file mode 100644 index 000000000..836fb32a7 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv2/README.md @@ -0,0 +1,16 @@ +# OpenAPI v2 Protocol Buffer Models + +This directory contains a Protocol Buffer-language model +and related code for supporting OpenAPI v2. + +Gnostic applications and plugins can use OpenAPIv2.proto +to generate Protocol Buffer support code for their preferred languages. + +OpenAPIv2.go is used by Gnostic to read JSON and YAML OpenAPI +descriptions into the Protocol Buffer-based datastructures +generated from OpenAPIv2.proto. + +OpenAPIv2.proto and OpenAPIv2.go are generated by the Gnostic +compiler generator, and OpenAPIv2.pb.go is generated by +protoc, the Protocol Buffer compiler, and protoc-gen-go, the +Protocol Buffer Go code generation plugin. diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv2/openapi-2.0.json b/vendor/github.com/googleapis/gnostic/OpenAPIv2/openapi-2.0.json new file mode 100644 index 000000000..2815a26ea --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv2/openapi-2.0.json @@ -0,0 +1,1610 @@ +{ + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." 
+ }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." + }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." 
+ }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for parameters" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + 
"type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. 
GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + }, + "description": { + "type": "string" + } + } + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.go b/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.go new file mode 100644 index 000000000..2559f8237 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.go @@ -0,0 +1,8333 @@ +// Copyright 2017 Google 
Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +package openapi_v3 + +import ( + "fmt" + "github.com/googleapis/gnostic/compiler" + "gopkg.in/yaml.v2" + "regexp" + "strings" +) + +// Version returns the package name (and OpenAPI version). +func Version() string { + return "openapi_v3" +} + +// NewAdditionalPropertiesItem creates an object of type AdditionalPropertiesItem if possible, returning an error if not. +func NewAdditionalPropertiesItem(in interface{}, context *compiler.Context) (*AdditionalPropertiesItem, error) { + errors := make([]error, 0) + x := &AdditionalPropertiesItem{} + matched := false + // SchemaOrReference schema_or_reference = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchemaOrReference(m, compiler.NewContext("schemaOrReference", context)) + if matchingError == nil { + x.Oneof = &AdditionalPropertiesItem_SchemaOrReference{SchemaOrReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // bool boolean = 2; + boolValue, ok := in.(bool) + if ok { + x.Oneof = &AdditionalPropertiesItem_Boolean{Boolean: boolValue} + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAny creates an object of type Any if possible, returning an error if not. +func NewAny(in interface{}, context *compiler.Context) (*Any, error) { + errors := make([]error, 0) + x := &Any{} + bytes, _ := yaml.Marshal(in) + x.Yaml = string(bytes) + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAnyOrExpression creates an object of type AnyOrExpression if possible, returning an error if not. 
+func NewAnyOrExpression(in interface{}, context *compiler.Context) (*AnyOrExpression, error) { + errors := make([]error, 0) + x := &AnyOrExpression{} + matched := false + // Any any = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewAny(m, compiler.NewContext("any", context)) + if matchingError == nil { + x.Oneof = &AnyOrExpression_Any{Any: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Expression expression = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewExpression(m, compiler.NewContext("expression", context)) + if matchingError == nil { + x.Oneof = &AnyOrExpression_Expression{Expression: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAnysOrExpressions creates an object of type AnysOrExpressions if possible, returning an error if not. +func NewAnysOrExpressions(in interface{}, context *compiler.Context) (*AnysOrExpressions, error) { + errors := make([]error, 0) + x := &AnysOrExpressions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAnyOrExpression additional_properties = 1; + // MAP: AnyOrExpression + x.AdditionalProperties = make([]*NamedAnyOrExpression, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedAnyOrExpression{} + pair.Name = k + var err error + pair.Value, err = NewAnyOrExpression(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewCallback creates an object of type Callback if possible, returning an error if not. 
+func NewCallback(in interface{}, context *compiler.Context) (*Callback, error) { + errors := make([]error, 0) + x := &Callback{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern0, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedPathItem path = 1; + // MAP: PathItem ^ + x.Path = make([]*NamedPathItem, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if true { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + // repeated NamedAny specification_extension = 2; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewCallbackOrReference creates an object of type CallbackOrReference if possible, returning an error if not. +func NewCallbackOrReference(in interface{}, context *compiler.Context) (*CallbackOrReference, error) { + errors := make([]error, 0) + x := &CallbackOrReference{} + matched := false + // Callback callback = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewCallback(m, compiler.NewContext("callback", context)) + if matchingError == nil { + x.Oneof = &CallbackOrReference_Callback{Callback: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &CallbackOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewCallbacksOrReferences creates an object of type CallbacksOrReferences if possible, returning an error if not. 
+func NewCallbacksOrReferences(in interface{}, context *compiler.Context) (*CallbacksOrReferences, error) { + errors := make([]error, 0) + x := &CallbacksOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedCallbackOrReference additional_properties = 1; + // MAP: CallbackOrReference + x.AdditionalProperties = make([]*NamedCallbackOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedCallbackOrReference{} + pair.Name = k + var err error + pair.Value, err = NewCallbackOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewComponents creates an object of type Components if possible, returning an error if not. +func NewComponents(in interface{}, context *compiler.Context) (*Components, error) { + errors := make([]error, 0) + x := &Components{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"callbacks", "examples", "headers", "links", "parameters", "requestBodies", "responses", "schemas", "securitySchemes"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // SchemasOrReferences schemas = 1; + v1 := compiler.MapValueForKey(m, "schemas") + if v1 != nil { + var err error + x.Schemas, err = NewSchemasOrReferences(v1, compiler.NewContext("schemas", context)) + if err != nil { + errors = append(errors, err) + } + } + // ResponsesOrReferences responses = 2; + v2 := compiler.MapValueForKey(m, "responses") + if v2 != nil { + var err error + x.Responses, err = NewResponsesOrReferences(v2, compiler.NewContext("responses", context)) + if err != nil { + errors = append(errors, err) + } + } + // ParametersOrReferences parameters = 3; + v3 := compiler.MapValueForKey(m, "parameters") + if v3 != nil { + var err error + x.Parameters, err = NewParametersOrReferences(v3, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 4; + v4 := compiler.MapValueForKey(m, "examples") + if v4 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v4, compiler.NewContext("examples", context)) + if err != nil { + errors = append(errors, err) + } + } + // RequestBodiesOrReferences request_bodies = 5; + v5 := compiler.MapValueForKey(m, "requestBodies") + if v5 != nil { + var err error + x.RequestBodies, err = NewRequestBodiesOrReferences(v5, compiler.NewContext("requestBodies", context)) + if err != nil { + errors = append(errors, err) + } + } + // HeadersOrReferences headers = 6; + v6 := compiler.MapValueForKey(m, "headers") + if v6 != nil { + var err error + x.Headers, err = NewHeadersOrReferences(v6, compiler.NewContext("headers", context)) + if err != nil { + errors = append(errors, err) + } + } + // SecuritySchemesOrReferences security_schemes = 7; + v7 := 
compiler.MapValueForKey(m, "securitySchemes") + if v7 != nil { + var err error + x.SecuritySchemes, err = NewSecuritySchemesOrReferences(v7, compiler.NewContext("securitySchemes", context)) + if err != nil { + errors = append(errors, err) + } + } + // LinksOrReferences links = 8; + v8 := compiler.MapValueForKey(m, "links") + if v8 != nil { + var err error + x.Links, err = NewLinksOrReferences(v8, compiler.NewContext("links", context)) + if err != nil { + errors = append(errors, err) + } + } + // CallbacksOrReferences callbacks = 9; + v9 := compiler.MapValueForKey(m, "callbacks") + if v9 != nil { + var err error + x.Callbacks, err = NewCallbacksOrReferences(v9, compiler.NewContext("callbacks", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 10; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewContact creates an object of type Contact if possible, returning an error if not. +func NewContact(in interface{}, context *compiler.Context) (*Contact, error) { + errors := make([]error, 0) + x := &Contact{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"email", "name", "url"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string email = 3; + v3 := compiler.MapValueForKey(m, "email") + if v3 != nil { + x.Email, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for email: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := 
compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefaultType creates an object of type DefaultType if possible, returning an error if not. +func NewDefaultType(in interface{}, context *compiler.Context) (*DefaultType, error) { + errors := make([]error, 0) + x := &DefaultType{} + matched := false + switch in := in.(type) { + case bool: + x.Oneof = &DefaultType_Boolean{Boolean: in} + matched = true + case string: + x.Oneof = &DefaultType_String_{String_: in} + matched = true + case int64: + x.Oneof = &DefaultType_Number{Number: float64(in)} + matched = true + case int32: + x.Oneof = &DefaultType_Number{Number: float64(in)} + matched = true + case int: + x.Oneof = &DefaultType_Number{Number: float64(in)} + matched = true + case float64: + x.Oneof = &DefaultType_Number{Number: in} + matched = true + case float32: + x.Oneof = &DefaultType_Number{Number: float64(in)} + matched = true + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDiscriminator creates an object of type Discriminator if possible, returning an error if not. +func NewDiscriminator(in interface{}, context *compiler.Context) (*Discriminator, error) { + errors := make([]error, 0) + x := &Discriminator{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"propertyName"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"mapping", "propertyName"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string property_name = 1; + v1 := compiler.MapValueForKey(m, "propertyName") + if v1 != nil { + x.PropertyName, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for propertyName: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Strings mapping = 2; + v2 := compiler.MapValueForKey(m, "mapping") + if v2 != nil { + var err error + x.Mapping, err = NewStrings(v2, compiler.NewContext("mapping", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDocument creates an object of type Document if possible, returning an error if not. 
+func NewDocument(in interface{}, context *compiler.Context) (*Document, error) { + errors := make([]error, 0) + x := &Document{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"info", "openapi", "paths"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"components", "externalDocs", "info", "openapi", "paths", "security", "servers", "tags"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string openapi = 1; + v1 := compiler.MapValueForKey(m, "openapi") + if v1 != nil { + x.Openapi, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for openapi: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Info info = 2; + v2 := compiler.MapValueForKey(m, "info") + if v2 != nil { + var err error + x.Info, err = NewInfo(v2, compiler.NewContext("info", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Server servers = 3; + v3 := compiler.MapValueForKey(m, "servers") + if v3 != nil { + // repeated Server + x.Servers = make([]*Server, 0) + a, ok := v3.([]interface{}) + if ok { + for _, item := range a { + y, err := NewServer(item, compiler.NewContext("servers", context)) + if err != nil { + errors = append(errors, err) + } + x.Servers = append(x.Servers, y) + } + } + } + // Paths paths = 4; + v4 := compiler.MapValueForKey(m, "paths") + if v4 != nil { + var err error + x.Paths, err = NewPaths(v4, compiler.NewContext("paths", context)) + if err != nil { + errors = append(errors, err) + } + } + // Components components = 5; + v5 := compiler.MapValueForKey(m, "components") + if v5 != nil { + var err error + x.Components, err = NewComponents(v5, compiler.NewContext("components", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated SecurityRequirement security = 6; + v6 := compiler.MapValueForKey(m, "security") + if v6 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := v6.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // repeated Tag tags = 7; + v7 := compiler.MapValueForKey(m, "tags") + if v7 != nil { + // repeated Tag + x.Tags = make([]*Tag, 0) + a, ok := v7.([]interface{}) + if ok { + for _, item := range a { + y, err := NewTag(item, compiler.NewContext("tags", context)) + if err != nil { + errors = append(errors, err) + } + x.Tags = append(x.Tags, y) + } + } + } + // ExternalDocs external_docs = 8; + v8 := compiler.MapValueForKey(m, "externalDocs") + if v8 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v8, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } 
+ // repeated NamedAny specification_extension = 9; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewEncoding creates an object of type Encoding if possible, returning an error if not. +func NewEncoding(in interface{}, context *compiler.Context) (*Encoding, error) { + errors := make([]error, 0) + x := &Encoding{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowReserved", "contentType", "explode", "headers", "style"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string content_type = 1; + v1 := compiler.MapValueForKey(m, "contentType") + if v1 != nil { + x.ContentType, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for contentType: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // HeadersOrReferences headers = 2; + v2 := compiler.MapValueForKey(m, "headers") + if v2 != nil { + var err error + x.Headers, err = NewHeadersOrReferences(v2, compiler.NewContext("headers", context)) + if err != nil { + errors = append(errors, err) + } + } + // string style = 3; + v3 := compiler.MapValueForKey(m, "style") + if v3 != nil { + x.Style, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for style: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool explode = 4; + v4 := compiler.MapValueForKey(m, "explode") + if v4 != nil { + x.Explode, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for explode: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_reserved = 5; + v5 := compiler.MapValueForKey(m, "allowReserved") + if v5 != nil { + x.AllowReserved, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowReserved: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 6; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := 
yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewEncodings creates an object of type Encodings if possible, returning an error if not. +func NewEncodings(in interface{}, context *compiler.Context) (*Encodings, error) { + errors := make([]error, 0) + x := &Encodings{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedEncoding additional_properties = 1; + // MAP: Encoding + x.AdditionalProperties = make([]*NamedEncoding, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedEncoding{} + pair.Name = k + var err error + pair.Value, err = NewEncoding(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExample creates an object of type Example if possible, returning an error if not. +func NewExample(in interface{}, context *compiler.Context) (*Example, error) { + errors := make([]error, 0) + x := &Example{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"description", "externalValue", "summary", "value"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string summary = 1; + v1 := compiler.MapValueForKey(m, "summary") + if v1 != nil { + x.Summary, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any value = 3; + v3 := compiler.MapValueForKey(m, "value") + if v3 != nil { + var err error + x.Value, err = NewAny(v3, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + // string external_value = 4; + v4 := compiler.MapValueForKey(m, "externalValue") + if v4 != nil { + x.ExternalValue, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for externalValue: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + 
handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExampleOrReference creates an object of type ExampleOrReference if possible, returning an error if not. +func NewExampleOrReference(in interface{}, context *compiler.Context) (*ExampleOrReference, error) { + errors := make([]error, 0) + x := &ExampleOrReference{} + matched := false + // Example example = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewExample(m, compiler.NewContext("example", context)) + if matchingError == nil { + x.Oneof = &ExampleOrReference_Example{Example: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &ExampleOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExamples creates an object of type Examples if possible, returning an error if not. +func NewExamples(in interface{}, context *compiler.Context) (*Examples, error) { + errors := make([]error, 0) + x := &Examples{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExamplesOrReferences creates an object of type ExamplesOrReferences if possible, returning an error if not. 
+func NewExamplesOrReferences(in interface{}, context *compiler.Context) (*ExamplesOrReferences, error) { + errors := make([]error, 0) + x := &ExamplesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedExampleOrReference additional_properties = 1; + // MAP: ExampleOrReference + x.AdditionalProperties = make([]*NamedExampleOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedExampleOrReference{} + pair.Name = k + var err error + pair.Value, err = NewExampleOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExpression creates an object of type Expression if possible, returning an error if not. +func NewExpression(in interface{}, context *compiler.Context) (*Expression, error) { + errors := make([]error, 0) + x := &Expression{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExternalDocs creates an object of type ExternalDocs if possible, returning an error if not. 
+func NewExternalDocs(in interface{}, context *compiler.Context) (*ExternalDocs, error) { + errors := make([]error, 0) + x := &ExternalDocs{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeader creates an object of type Header if possible, returning an error if not. 
+func NewHeader(in interface{}, context *compiler.Context) (*Header, error) { + errors := make([]error, 0) + x := &Header{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "allowReserved", "content", "deprecated", "description", "example", "examples", "explode", "required", "schema", "style"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 2; + v2 := compiler.MapValueForKey(m, "required") + if v2 != nil { + x.Required, ok = v2.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 3; + v3 := compiler.MapValueForKey(m, "deprecated") + if v3 != nil { + x.Deprecated, ok = v3.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 4; + v4 := compiler.MapValueForKey(m, "allowEmptyValue") + if v4 != nil { + x.AllowEmptyValue, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string style = 5; + v5 := compiler.MapValueForKey(m, "style") + if v5 != nil { + x.Style, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for style: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool explode = 6; + v6 := compiler.MapValueForKey(m, "explode") + if v6 != nil { + x.Explode, ok = v6.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for explode: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_reserved = 7; + v7 := compiler.MapValueForKey(m, "allowReserved") + if v7 != nil { + x.AllowReserved, ok = v7.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowReserved: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaOrReference schema = 8; + v8 := compiler.MapValueForKey(m, "schema") + if v8 != nil { + var err error + x.Schema, err = NewSchemaOrReference(v8, compiler.NewContext("schema", context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 9; + v9 := compiler.MapValueForKey(m, "example") + if v9 != nil { + var err error + x.Example, err = NewAny(v9, compiler.NewContext("example", context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 10; + v10 := compiler.MapValueForKey(m, "examples") + if v10 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v10, compiler.NewContext("examples", context)) + if err != nil { + 
errors = append(errors, err) + } + } + // MediaTypes content = 11; + v11 := compiler.MapValueForKey(m, "content") + if v11 != nil { + var err error + x.Content, err = NewMediaTypes(v11, compiler.NewContext("content", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 12; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaderOrReference creates an object of type HeaderOrReference if possible, returning an error if not. +func NewHeaderOrReference(in interface{}, context *compiler.Context) (*HeaderOrReference, error) { + errors := make([]error, 0) + x := &HeaderOrReference{} + matched := false + // Header header = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewHeader(m, compiler.NewContext("header", context)) + if matchingError == nil { + x.Oneof = &HeaderOrReference_Header{Header: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &HeaderOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeadersOrReferences creates an object of type HeadersOrReferences if possible, returning an error if not. +func NewHeadersOrReferences(in interface{}, context *compiler.Context) (*HeadersOrReferences, error) { + errors := make([]error, 0) + x := &HeadersOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedHeaderOrReference additional_properties = 1; + // MAP: HeaderOrReference + x.AdditionalProperties = make([]*NamedHeaderOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedHeaderOrReference{} + pair.Name = k + var err error + pair.Value, err = NewHeaderOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewInfo creates an object of type Info if possible, returning an error if not. 
+func NewInfo(in interface{}, context *compiler.Context) (*Info, error) { + errors := make([]error, 0) + x := &Info{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"title", "version"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"contact", "description", "license", "termsOfService", "title", "version"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string title = 1; + v1 := compiler.MapValueForKey(m, "title") + if v1 != nil { + x.Title, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string terms_of_service = 3; + v3 := compiler.MapValueForKey(m, "termsOfService") + if v3 != nil { + x.TermsOfService, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for termsOfService: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Contact contact = 4; + v4 := compiler.MapValueForKey(m, "contact") + if v4 != nil { + var err error + x.Contact, err = NewContact(v4, compiler.NewContext("contact", context)) + if err != nil { + errors = append(errors, err) + } + } + // License license = 5; + v5 := compiler.MapValueForKey(m, "license") + if v5 != nil { + var err error + x.License, err = NewLicense(v5, compiler.NewContext("license", context)) + if err != nil { + errors = append(errors, err) + } + } + // string version = 6; + v6 := compiler.MapValueForKey(m, "version") + if v6 != nil { + x.Version, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for version: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 7; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + 
+// NewItemsItem creates an object of type ItemsItem if possible, returning an error if not. +func NewItemsItem(in interface{}, context *compiler.Context) (*ItemsItem, error) { + errors := make([]error, 0) + x := &ItemsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value for item array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.SchemaOrReference = make([]*SchemaOrReference, 0) + y, err := NewSchemaOrReference(m, compiler.NewContext("", context)) + if err != nil { + return nil, err + } + x.SchemaOrReference = append(x.SchemaOrReference, y) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLicense creates an object of type License if possible, returning an error if not. +func NewLicense(in interface{}, context *compiler.Context) (*License, error) { + errors := make([]error, 0) + x := &License{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"name", "url"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLink creates an object of type Link if possible, returning an error if not. 
+func NewLink(in interface{}, context *compiler.Context) (*Link, error) { + errors := make([]error, 0) + x := &Link{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"description", "operationId", "operationRef", "parameters", "requestBody", "server"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string operation_ref = 1; + v1 := compiler.MapValueForKey(m, "operationRef") + if v1 != nil { + x.OperationRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for operationRef: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string operation_id = 2; + v2 := compiler.MapValueForKey(m, "operationId") + if v2 != nil { + x.OperationId, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // AnysOrExpressions parameters = 3; + v3 := compiler.MapValueForKey(m, "parameters") + if v3 != nil { + var err error + x.Parameters, err = NewAnysOrExpressions(v3, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + } + // AnyOrExpression request_body = 4; + v4 := compiler.MapValueForKey(m, "requestBody") + if v4 != nil { + var err error + x.RequestBody, err = NewAnyOrExpression(v4, compiler.NewContext("requestBody", context)) + if err != nil { + errors = append(errors, err) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Server server = 6; + v6 := compiler.MapValueForKey(m, "server") + if v6 != nil { + var err error + x.Server, err = NewServer(v6, compiler.NewContext("server", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 7; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLinkOrReference creates an object of type LinkOrReference if possible, returning an error if not. 
+func NewLinkOrReference(in interface{}, context *compiler.Context) (*LinkOrReference, error) { + errors := make([]error, 0) + x := &LinkOrReference{} + matched := false + // Link link = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewLink(m, compiler.NewContext("link", context)) + if matchingError == nil { + x.Oneof = &LinkOrReference_Link{Link: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &LinkOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLinksOrReferences creates an object of type LinksOrReferences if possible, returning an error if not. +func NewLinksOrReferences(in interface{}, context *compiler.Context) (*LinksOrReferences, error) { + errors := make([]error, 0) + x := &LinksOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedLinkOrReference additional_properties = 1; + // MAP: LinkOrReference + x.AdditionalProperties = make([]*NamedLinkOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedLinkOrReference{} + pair.Name = k + var err error + pair.Value, err = NewLinkOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMediaType creates an object of type MediaType if possible, returning an error if not. 
+func NewMediaType(in interface{}, context *compiler.Context) (*MediaType, error) { + errors := make([]error, 0) + x := &MediaType{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"encoding", "example", "examples", "schema"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // SchemaOrReference schema = 1; + v1 := compiler.MapValueForKey(m, "schema") + if v1 != nil { + var err error + x.Schema, err = NewSchemaOrReference(v1, compiler.NewContext("schema", context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 2; + v2 := compiler.MapValueForKey(m, "example") + if v2 != nil { + var err error + x.Example, err = NewAny(v2, compiler.NewContext("example", context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 3; + v3 := compiler.MapValueForKey(m, "examples") + if v3 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v3, compiler.NewContext("examples", context)) + if err != nil { + errors = append(errors, err) + } + } + // Encodings encoding = 4; + v4 := compiler.MapValueForKey(m, "encoding") + if v4 != nil { + var err error + x.Encoding, err = NewEncodings(v4, compiler.NewContext("encoding", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMediaTypes creates an object of type MediaTypes if possible, returning an error if not. 
+func NewMediaTypes(in interface{}, context *compiler.Context) (*MediaTypes, error) { + errors := make([]error, 0) + x := &MediaTypes{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedMediaType additional_properties = 1; + // MAP: MediaType + x.AdditionalProperties = make([]*NamedMediaType, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedMediaType{} + pair.Name = k + var err error + pair.Value, err = NewMediaType(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedAny creates an object of type NamedAny if possible, returning an error if not. +func NewNamedAny(in interface{}, context *compiler.Context) (*NamedAny, error) { + errors := make([]error, 0) + x := &NamedAny{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewAny(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedAnyOrExpression creates an object of type NamedAnyOrExpression if possible, returning an error if not. 
+func NewNamedAnyOrExpression(in interface{}, context *compiler.Context) (*NamedAnyOrExpression, error) { + errors := make([]error, 0) + x := &NamedAnyOrExpression{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // AnyOrExpression value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewAnyOrExpression(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedCallbackOrReference creates an object of type NamedCallbackOrReference if possible, returning an error if not. +func NewNamedCallbackOrReference(in interface{}, context *compiler.Context) (*NamedCallbackOrReference, error) { + errors := make([]error, 0) + x := &NamedCallbackOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // CallbackOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewCallbackOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedEncoding creates an object of type NamedEncoding if possible, returning an error if not. 
+func NewNamedEncoding(in interface{}, context *compiler.Context) (*NamedEncoding, error) { + errors := make([]error, 0) + x := &NamedEncoding{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Encoding value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewEncoding(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedExampleOrReference creates an object of type NamedExampleOrReference if possible, returning an error if not. +func NewNamedExampleOrReference(in interface{}, context *compiler.Context) (*NamedExampleOrReference, error) { + errors := make([]error, 0) + x := &NamedExampleOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExampleOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewExampleOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedHeaderOrReference creates an object of type NamedHeaderOrReference if possible, returning an error if not. 
+func NewNamedHeaderOrReference(in interface{}, context *compiler.Context) (*NamedHeaderOrReference, error) { + errors := make([]error, 0) + x := &NamedHeaderOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // HeaderOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewHeaderOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedLinkOrReference creates an object of type NamedLinkOrReference if possible, returning an error if not. +func NewNamedLinkOrReference(in interface{}, context *compiler.Context) (*NamedLinkOrReference, error) { + errors := make([]error, 0) + x := &NamedLinkOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // LinkOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewLinkOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedMediaType creates an object of type NamedMediaType if possible, returning an error if not. 
+func NewNamedMediaType(in interface{}, context *compiler.Context) (*NamedMediaType, error) { + errors := make([]error, 0) + x := &NamedMediaType{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // MediaType value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewMediaType(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedParameterOrReference creates an object of type NamedParameterOrReference if possible, returning an error if not. +func NewNamedParameterOrReference(in interface{}, context *compiler.Context) (*NamedParameterOrReference, error) { + errors := make([]error, 0) + x := &NamedParameterOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ParameterOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewParameterOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedPathItem creates an object of type NamedPathItem if possible, returning an error if not. 
+func NewNamedPathItem(in interface{}, context *compiler.Context) (*NamedPathItem, error) { + errors := make([]error, 0) + x := &NamedPathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PathItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewPathItem(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedRequestBodyOrReference creates an object of type NamedRequestBodyOrReference if possible, returning an error if not. +func NewNamedRequestBodyOrReference(in interface{}, context *compiler.Context) (*NamedRequestBodyOrReference, error) { + errors := make([]error, 0) + x := &NamedRequestBodyOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // RequestBodyOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewRequestBodyOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponseOrReference creates an object of type NamedResponseOrReference if possible, returning an error if not. 
+func NewNamedResponseOrReference(in interface{}, context *compiler.Context) (*NamedResponseOrReference, error) { + errors := make([]error, 0) + x := &NamedResponseOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ResponseOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponseOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSchemaOrReference creates an object of type NamedSchemaOrReference if possible, returning an error if not. +func NewNamedSchemaOrReference(in interface{}, context *compiler.Context) (*NamedSchemaOrReference, error) { + errors := make([]error, 0) + x := &NamedSchemaOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSchemaOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSecuritySchemeOrReference creates an object of type NamedSecuritySchemeOrReference if possible, returning an error if not. 
+func NewNamedSecuritySchemeOrReference(in interface{}, context *compiler.Context) (*NamedSecuritySchemeOrReference, error) { + errors := make([]error, 0) + x := &NamedSecuritySchemeOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SecuritySchemeOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSecuritySchemeOrReference(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedServerVariable creates an object of type NamedServerVariable if possible, returning an error if not. +func NewNamedServerVariable(in interface{}, context *compiler.Context) (*NamedServerVariable, error) { + errors := make([]error, 0) + x := &NamedServerVariable{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ServerVariable value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewServerVariable(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedString creates an object of type NamedString if possible, returning an error if not. 
+func NewNamedString(in interface{}, context *compiler.Context) (*NamedString, error) { + errors := make([]error, 0) + x := &NamedString{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + x.Value, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for value: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauthFlow creates an object of type OauthFlow if possible, returning an error if not. +func NewOauthFlow(in interface{}, context *compiler.Context) (*OauthFlow, error) { + errors := make([]error, 0) + x := &OauthFlow{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"authorizationUrl", "refreshUrl", "scopes", "tokenUrl"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string authorization_url = 1; + v1 := compiler.MapValueForKey(m, "authorizationUrl") + if v1 != nil { + x.AuthorizationUrl, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string token_url = 2; + v2 := compiler.MapValueForKey(m, "tokenUrl") + if v2 != nil { + x.TokenUrl, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string refresh_url = 3; + v3 := compiler.MapValueForKey(m, "refreshUrl") + if v3 != nil { + x.RefreshUrl, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for refreshUrl: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Strings scopes = 4; + v4 := compiler.MapValueForKey(m, "scopes") + if v4 != nil { + var err error + x.Scopes, err = NewStrings(v4, compiler.NewContext("scopes", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + 
result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauthFlows creates an object of type OauthFlows if possible, returning an error if not. +func NewOauthFlows(in interface{}, context *compiler.Context) (*OauthFlows, error) { + errors := make([]error, 0) + x := &OauthFlows{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"authorizationCode", "clientCredentials", "implicit", "password"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // OauthFlow implicit = 1; + v1 := compiler.MapValueForKey(m, "implicit") + if v1 != nil { + var err error + x.Implicit, err = NewOauthFlow(v1, compiler.NewContext("implicit", context)) + if err != nil { + errors = append(errors, err) + } + } + // OauthFlow password = 2; + v2 := compiler.MapValueForKey(m, "password") + if v2 != nil { + var err error + x.Password, err = NewOauthFlow(v2, compiler.NewContext("password", context)) + if err != nil { + errors = append(errors, err) + } + } + // OauthFlow client_credentials = 3; + v3 := compiler.MapValueForKey(m, "clientCredentials") + if v3 != nil { + var err error + x.ClientCredentials, err = NewOauthFlow(v3, compiler.NewContext("clientCredentials", context)) + if err != nil { + errors = append(errors, err) + } + } + // OauthFlow authorization_code = 4; + v4 := compiler.MapValueForKey(m, "authorizationCode") + if v4 != nil { + var err error + x.AuthorizationCode, err = NewOauthFlow(v4, compiler.NewContext("authorizationCode", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewObject creates an object of type Object if possible, returning an error if not. 
+func NewObject(in interface{}, context *compiler.Context) (*Object, error) { + errors := make([]error, 0) + x := &Object{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOperation creates an object of type Operation if possible, returning an error if not. +func NewOperation(in interface{}, context *compiler.Context) (*Operation, error) { + errors := make([]error, 0) + x := &Operation{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"responses"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"callbacks", "deprecated", "description", "externalDocs", "operationId", "parameters", "requestBody", "responses", "security", "servers", "summary", "tags"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string tags = 1; + v1 := compiler.MapValueForKey(m, "tags") + if v1 != nil { + v, ok := v1.([]interface{}) + if ok { + x.Tags = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for tags: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 4; + v4 := compiler.MapValueForKey(m, "externalDocs") + if v4 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v4, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } 
+ } + // string operation_id = 5; + v5 := compiler.MapValueForKey(m, "operationId") + if v5 != nil { + x.OperationId, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated ParameterOrReference parameters = 6; + v6 := compiler.MapValueForKey(m, "parameters") + if v6 != nil { + // repeated ParameterOrReference + x.Parameters = make([]*ParameterOrReference, 0) + a, ok := v6.([]interface{}) + if ok { + for _, item := range a { + y, err := NewParameterOrReference(item, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // RequestBodyOrReference request_body = 7; + v7 := compiler.MapValueForKey(m, "requestBody") + if v7 != nil { + var err error + x.RequestBody, err = NewRequestBodyOrReference(v7, compiler.NewContext("requestBody", context)) + if err != nil { + errors = append(errors, err) + } + } + // Responses responses = 8; + v8 := compiler.MapValueForKey(m, "responses") + if v8 != nil { + var err error + x.Responses, err = NewResponses(v8, compiler.NewContext("responses", context)) + if err != nil { + errors = append(errors, err) + } + } + // CallbacksOrReferences callbacks = 9; + v9 := compiler.MapValueForKey(m, "callbacks") + if v9 != nil { + var err error + x.Callbacks, err = NewCallbacksOrReferences(v9, compiler.NewContext("callbacks", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool deprecated = 10; + v10 := compiler.MapValueForKey(m, "deprecated") + if v10 != nil { + x.Deprecated, ok = v10.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SecurityRequirement security = 11; + v11 := compiler.MapValueForKey(m, "security") + if v11 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := v11.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // repeated Server servers = 12; + v12 := compiler.MapValueForKey(m, "servers") + if v12 != nil { + // repeated Server + x.Servers = make([]*Server, 0) + a, ok := v12.([]interface{}) + if ok { + for _, item := range a { + y, err := NewServer(item, compiler.NewContext("servers", context)) + if err != nil { + errors = append(errors, err) + } + x.Servers = append(x.Servers, y) + } + } + } + // repeated NamedAny specification_extension = 13; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// 
NewParameter creates an object of type Parameter if possible, returning an error if not. +func NewParameter(in interface{}, context *compiler.Context) (*Parameter, error) { + errors := make([]error, 0) + x := &Parameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"allowEmptyValue", "allowReserved", "content", "deprecated", "description", "example", "examples", "explode", "in", "name", "required", "schema", "style"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 4; + v4 := compiler.MapValueForKey(m, "required") + if v4 != nil { + x.Required, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 5; + v5 := compiler.MapValueForKey(m, "deprecated") + if v5 != nil { + x.Deprecated, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 6; + v6 := compiler.MapValueForKey(m, "allowEmptyValue") + if v6 != nil { + x.AllowEmptyValue, ok = v6.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string style = 7; + v7 := compiler.MapValueForKey(m, "style") + if v7 != nil { + x.Style, ok = v7.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for style: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool explode = 8; + v8 := compiler.MapValueForKey(m, "explode") + if v8 != nil { + x.Explode, ok = v8.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for explode: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_reserved = 9; + v9 := 
compiler.MapValueForKey(m, "allowReserved") + if v9 != nil { + x.AllowReserved, ok = v9.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for allowReserved: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaOrReference schema = 10; + v10 := compiler.MapValueForKey(m, "schema") + if v10 != nil { + var err error + x.Schema, err = NewSchemaOrReference(v10, compiler.NewContext("schema", context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 11; + v11 := compiler.MapValueForKey(m, "example") + if v11 != nil { + var err error + x.Example, err = NewAny(v11, compiler.NewContext("example", context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 12; + v12 := compiler.MapValueForKey(m, "examples") + if v12 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v12, compiler.NewContext("examples", context)) + if err != nil { + errors = append(errors, err) + } + } + // MediaTypes content = 13; + v13 := compiler.MapValueForKey(m, "content") + if v13 != nil { + var err error + x.Content, err = NewMediaTypes(v13, compiler.NewContext("content", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 14; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameterOrReference creates an object of type ParameterOrReference if possible, returning an error if not. +func NewParameterOrReference(in interface{}, context *compiler.Context) (*ParameterOrReference, error) { + errors := make([]error, 0) + x := &ParameterOrReference{} + matched := false + // Parameter parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewParameter(m, compiler.NewContext("parameter", context)) + if matchingError == nil { + x.Oneof = &ParameterOrReference_Parameter{Parameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &ParameterOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParametersOrReferences creates an object of type ParametersOrReferences if possible, returning an error if not. 
+func NewParametersOrReferences(in interface{}, context *compiler.Context) (*ParametersOrReferences, error) { + errors := make([]error, 0) + x := &ParametersOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedParameterOrReference additional_properties = 1; + // MAP: ParameterOrReference + x.AdditionalProperties = make([]*NamedParameterOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedParameterOrReference{} + pair.Name = k + var err error + pair.Value, err = NewParameterOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathItem creates an object of type PathItem if possible, returning an error if not. +func NewPathItem(in interface{}, context *compiler.Context) (*PathItem, error) { + errors := make([]error, 0) + x := &PathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "delete", "description", "get", "head", "options", "parameters", "patch", "post", "put", "servers", "summary", "trace"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Operation get = 4; + v4 := compiler.MapValueForKey(m, "get") + if v4 != nil { + var err error + x.Get, err = NewOperation(v4, compiler.NewContext("get", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation put = 5; + v5 := compiler.MapValueForKey(m, "put") + if v5 != nil { + var err error + x.Put, err = NewOperation(v5, compiler.NewContext("put", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation post = 6; + v6 := compiler.MapValueForKey(m, "post") + if v6 != nil { + var err error + x.Post, err = NewOperation(v6, compiler.NewContext("post", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation delete = 7; + v7 := compiler.MapValueForKey(m, "delete") + if v7 != nil { + var err error + x.Delete, err = NewOperation(v7, compiler.NewContext("delete", 
context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation options = 8; + v8 := compiler.MapValueForKey(m, "options") + if v8 != nil { + var err error + x.Options, err = NewOperation(v8, compiler.NewContext("options", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation head = 9; + v9 := compiler.MapValueForKey(m, "head") + if v9 != nil { + var err error + x.Head, err = NewOperation(v9, compiler.NewContext("head", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation patch = 10; + v10 := compiler.MapValueForKey(m, "patch") + if v10 != nil { + var err error + x.Patch, err = NewOperation(v10, compiler.NewContext("patch", context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation trace = 11; + v11 := compiler.MapValueForKey(m, "trace") + if v11 != nil { + var err error + x.Trace, err = NewOperation(v11, compiler.NewContext("trace", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Server servers = 12; + v12 := compiler.MapValueForKey(m, "servers") + if v12 != nil { + // repeated Server + x.Servers = make([]*Server, 0) + a, ok := v12.([]interface{}) + if ok { + for _, item := range a { + y, err := NewServer(item, compiler.NewContext("servers", context)) + if err != nil { + errors = append(errors, err) + } + x.Servers = append(x.Servers, y) + } + } + } + // repeated ParameterOrReference parameters = 13; + v13 := compiler.MapValueForKey(m, "parameters") + if v13 != nil { + // repeated ParameterOrReference + x.Parameters = make([]*ParameterOrReference, 0) + a, ok := v13.([]interface{}) + if ok { + for _, item := range a { + y, err := NewParameterOrReference(item, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // repeated NamedAny specification_extension = 14; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPaths creates an object of type Paths if possible, returning an error if not. 
+func NewPaths(in interface{}, context *compiler.Context) (*Paths, error) { + errors := make([]error, 0) + x := &Paths{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern2, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedPathItem path = 1; + // MAP: PathItem ^/ + x.Path = make([]*NamedPathItem, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "/") { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + // repeated NamedAny specification_extension = 2; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewProperties creates an object of type Properties if possible, returning an error if not. +func NewProperties(in interface{}, context *compiler.Context) (*Properties, error) { + errors := make([]error, 0) + x := &Properties{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchemaOrReference additional_properties = 1; + // MAP: SchemaOrReference + x.AdditionalProperties = make([]*NamedSchemaOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSchemaOrReference{} + pair.Name = k + var err error + pair.Value, err = NewSchemaOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewReference creates an object of type Reference if possible, returning an error if not. 
+func NewReference(in interface{}, context *compiler.Context) (*Reference, error) { + errors := make([]error, 0) + x := &Reference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"$ref"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"$ref"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewRequestBodiesOrReferences creates an object of type RequestBodiesOrReferences if possible, returning an error if not. +func NewRequestBodiesOrReferences(in interface{}, context *compiler.Context) (*RequestBodiesOrReferences, error) { + errors := make([]error, 0) + x := &RequestBodiesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedRequestBodyOrReference additional_properties = 1; + // MAP: RequestBodyOrReference + x.AdditionalProperties = make([]*NamedRequestBodyOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedRequestBodyOrReference{} + pair.Name = k + var err error + pair.Value, err = NewRequestBodyOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewRequestBody creates an object of type RequestBody if possible, returning an error if not. 
+func NewRequestBody(in interface{}, context *compiler.Context) (*RequestBody, error) { + errors := make([]error, 0) + x := &RequestBody{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"content"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"content", "description", "required"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // MediaTypes content = 2; + v2 := compiler.MapValueForKey(m, "content") + if v2 != nil { + var err error + x.Content, err = NewMediaTypes(v2, compiler.NewContext("content", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool required = 3; + v3 := compiler.MapValueForKey(m, "required") + if v3 != nil { + x.Required, ok = v3.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewRequestBodyOrReference creates an object of type RequestBodyOrReference if possible, returning an error if not. 
+func NewRequestBodyOrReference(in interface{}, context *compiler.Context) (*RequestBodyOrReference, error) { + errors := make([]error, 0) + x := &RequestBodyOrReference{} + matched := false + // RequestBody request_body = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewRequestBody(m, compiler.NewContext("requestBody", context)) + if matchingError == nil { + x.Oneof = &RequestBodyOrReference_RequestBody{RequestBody: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &RequestBodyOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponse creates an object of type Response if possible, returning an error if not. +func NewResponse(in interface{}, context *compiler.Context) (*Response, error) { + errors := make([]error, 0) + x := &Response{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"description"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"content", "description", "headers", "links"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // HeadersOrReferences headers = 2; + v2 := compiler.MapValueForKey(m, "headers") + if v2 != nil { + var err error + x.Headers, err = NewHeadersOrReferences(v2, compiler.NewContext("headers", context)) + if err != nil { + errors = append(errors, err) + } + } + // MediaTypes content = 3; + v3 := compiler.MapValueForKey(m, "content") + if v3 != nil { + var err error + x.Content, err = NewMediaTypes(v3, compiler.NewContext("content", context)) + if err != nil { + errors = append(errors, err) + } + } + // LinksOrReferences links = 4; + v4 := compiler.MapValueForKey(m, "links") + if v4 != nil { + var err error + x.Links, err = NewLinksOrReferences(v4, compiler.NewContext("links", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok 
:= compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseOrReference creates an object of type ResponseOrReference if possible, returning an error if not. +func NewResponseOrReference(in interface{}, context *compiler.Context) (*ResponseOrReference, error) { + errors := make([]error, 0) + x := &ResponseOrReference{} + matched := false + // Response response = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewResponse(m, compiler.NewContext("response", context)) + if matchingError == nil { + x.Oneof = &ResponseOrReference_Response{Response: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &ResponseOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponses creates an object of type Responses if possible, returning an error if not. 
+func NewResponses(in interface{}, context *compiler.Context) (*Responses, error) { + errors := make([]error, 0) + x := &Responses{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"default"} + allowedPatterns := []*regexp.Regexp{pattern3, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // ResponseOrReference default = 1; + v1 := compiler.MapValueForKey(m, "default") + if v1 != nil { + var err error + x.Default, err = NewResponseOrReference(v1, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedResponseOrReference response_or_reference = 2; + // MAP: ResponseOrReference ^([0-9X]{3})$ + x.ResponseOrReference = make([]*NamedResponseOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if pattern3.MatchString(k) { + pair := &NamedResponseOrReference{} + pair.Name = k + var err error + pair.Value, err = NewResponseOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.ResponseOrReference = append(x.ResponseOrReference, pair) + } + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponsesOrReferences creates an object of type ResponsesOrReferences if possible, returning an error if not. +func NewResponsesOrReferences(in interface{}, context *compiler.Context) (*ResponsesOrReferences, error) { + errors := make([]error, 0) + x := &ResponsesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedResponseOrReference additional_properties = 1; + // MAP: ResponseOrReference + x.AdditionalProperties = make([]*NamedResponseOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedResponseOrReference{} + pair.Name = k + var err error + pair.Value, err = NewResponseOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchema creates an object of type Schema if possible, returning an error if not. 
+func NewSchema(in interface{}, context *compiler.Context) (*Schema, error) { + errors := make([]error, 0) + x := &Schema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"additionalProperties", "allOf", "anyOf", "default", "deprecated", "description", "discriminator", "enum", "example", "exclusiveMaximum", "exclusiveMinimum", "externalDocs", "format", "items", "maxItems", "maxLength", "maxProperties", "maximum", "minItems", "minLength", "minProperties", "minimum", "multipleOf", "not", "nullable", "oneOf", "pattern", "properties", "readOnly", "required", "title", "type", "uniqueItems", "writeOnly", "xml"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool nullable = 1; + v1 := compiler.MapValueForKey(m, "nullable") + if v1 != nil { + x.Nullable, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for nullable: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Discriminator discriminator = 2; + v2 := compiler.MapValueForKey(m, "discriminator") + if v2 != nil { + var err error + x.Discriminator, err = NewDiscriminator(v2, compiler.NewContext("discriminator", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool read_only = 3; + v3 := compiler.MapValueForKey(m, "readOnly") + if v3 != nil { + x.ReadOnly, ok = v3.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool write_only = 4; + v4 := compiler.MapValueForKey(m, "writeOnly") + if v4 != nil { + x.WriteOnly, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for writeOnly: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Xml xml = 5; + v5 := compiler.MapValueForKey(m, "xml") + if v5 != nil { + var err error + x.Xml, err = NewXml(v5, compiler.NewContext("xml", context)) + if err != nil { + errors = append(errors, err) + } + } + // ExternalDocs external_docs = 6; + v6 := compiler.MapValueForKey(m, "externalDocs") + if v6 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v6, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 7; + v7 := compiler.MapValueForKey(m, "example") + if v7 != nil { + var err error + x.Example, err = NewAny(v7, compiler.NewContext("example", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool deprecated = 8; + v8 := compiler.MapValueForKey(m, "deprecated") + if v8 != nil { + x.Deprecated, ok = v8.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 9; + v9 := compiler.MapValueForKey(m, "title") + if v9 != nil { + x.Title, ok = v9.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float multiple_of = 10; + v10 := 
compiler.MapValueForKey(m, "multipleOf") + if v10 != nil { + switch v10 := v10.(type) { + case float64: + x.MultipleOf = v10 + case float32: + x.MultipleOf = float64(v10) + case uint64: + x.MultipleOf = float64(v10) + case uint32: + x.MultipleOf = float64(v10) + case int64: + x.MultipleOf = float64(v10) + case int32: + x.MultipleOf = float64(v10) + case int: + x.MultipleOf = float64(v10) + default: + message := fmt.Sprintf("has unexpected value for multipleOf: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + switch v11 := v11.(type) { + case float64: + x.Maximum = v11 + case float32: + x.Maximum = float64(v11) + case uint64: + x.Maximum = float64(v11) + case uint32: + x.Maximum = float64(v11) + case int64: + x.Maximum = float64(v11) + case int32: + x.Maximum = float64(v11) + case int: + x.Maximum = float64(v11) + default: + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = v12.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + switch v13 := v13.(type) { + case float64: + x.Minimum = v13 + case float32: + x.Minimum = float64(v13) + case uint64: + x.Minimum = float64(v13) + case uint32: + x.Minimum = float64(v13) + case int64: + x.Minimum = float64(v13) + case int32: + x.Minimum = float64(v13) + case int: + x.Minimum = float64(v13) + default: + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = v14.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := v15.(int) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := v16.(int) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, "pattern") + if v17 != nil { + x.Pattern, ok = v17.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := v18.(int) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %+v (%T)", v18, v18) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := v19.(int) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = v20.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %+v (%T)", v20, v20) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_properties = 21; + v21 := compiler.MapValueForKey(m, "maxProperties") + if v21 != nil { + t, ok := v21.(int) + if ok { + x.MaxProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxProperties: %+v (%T)", v21, v21) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_properties = 22; + v22 := compiler.MapValueForKey(m, "minProperties") + if v22 != nil { + t, ok := v22.(int) + if ok { + x.MinProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minProperties: %+v (%T)", v22, v22) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string required = 23; + v23 := compiler.MapValueForKey(m, "required") + if v23 != nil { + v, ok := v23.([]interface{}) + if ok { + x.Required = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v23, v23) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 24; + v24 := compiler.MapValueForKey(m, "enum") + if v24 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := v24.([]interface{}) + if ok { + for _, item := range a { + y, err := NewAny(item, compiler.NewContext("enum", context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // string type = 25; + v25 := compiler.MapValueForKey(m, "type") + if v25 != nil { + x.Type, ok = v25.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v25, v25) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SchemaOrReference all_of = 26; + v26 := compiler.MapValueForKey(m, "allOf") + if v26 != nil { + // repeated SchemaOrReference + x.AllOf = make([]*SchemaOrReference, 0) + a, ok := v26.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSchemaOrReference(item, compiler.NewContext("allOf", context)) + if err != nil { + errors = append(errors, err) + } + x.AllOf = append(x.AllOf, y) + } + } + } + // repeated SchemaOrReference one_of = 27; + v27 := compiler.MapValueForKey(m, "oneOf") + if v27 != nil { + // repeated SchemaOrReference + x.OneOf = make([]*SchemaOrReference, 0) + a, ok := v27.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSchemaOrReference(item, compiler.NewContext("oneOf", context)) + if err != nil { + errors = append(errors, err) + } + x.OneOf = append(x.OneOf, y) + } + } + } + // repeated SchemaOrReference any_of = 28; + v28 := compiler.MapValueForKey(m, "anyOf") + if v28 != nil { + // repeated SchemaOrReference + x.AnyOf = make([]*SchemaOrReference, 0) + a, ok := v28.([]interface{}) + if ok { + for _, item := range a { + y, err := NewSchemaOrReference(item, compiler.NewContext("anyOf", context)) + if err != nil { + errors = append(errors, err) + } + x.AnyOf = append(x.AnyOf, y) + } + } + } + // 
Schema not = 29; + v29 := compiler.MapValueForKey(m, "not") + if v29 != nil { + var err error + x.Not, err = NewSchema(v29, compiler.NewContext("not", context)) + if err != nil { + errors = append(errors, err) + } + } + // ItemsItem items = 30; + v30 := compiler.MapValueForKey(m, "items") + if v30 != nil { + var err error + x.Items, err = NewItemsItem(v30, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // Properties properties = 31; + v31 := compiler.MapValueForKey(m, "properties") + if v31 != nil { + var err error + x.Properties, err = NewProperties(v31, compiler.NewContext("properties", context)) + if err != nil { + errors = append(errors, err) + } + } + // AdditionalPropertiesItem additional_properties = 32; + v32 := compiler.MapValueForKey(m, "additionalProperties") + if v32 != nil { + var err error + x.AdditionalProperties, err = NewAdditionalPropertiesItem(v32, compiler.NewContext("additionalProperties", context)) + if err != nil { + errors = append(errors, err) + } + } + // DefaultType default = 33; + v33 := compiler.MapValueForKey(m, "default") + if v33 != nil { + var err error + x.Default, err = NewDefaultType(v33, compiler.NewContext("default", context)) + if err != nil { + errors = append(errors, err) + } + } + // string description = 34; + v34 := compiler.MapValueForKey(m, "description") + if v34 != nil { + x.Description, ok = v34.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v34, v34) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 35; + v35 := compiler.MapValueForKey(m, "format") + if v35 != nil { + x.Format, ok = v35.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v35, v35) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 36; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemaOrReference creates an object of type SchemaOrReference if possible, returning an error if not. 
+func NewSchemaOrReference(in interface{}, context *compiler.Context) (*SchemaOrReference, error) { + errors := make([]error, 0) + x := &SchemaOrReference{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", context)) + if matchingError == nil { + x.Oneof = &SchemaOrReference_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &SchemaOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemasOrReferences creates an object of type SchemasOrReferences if possible, returning an error if not. +func NewSchemasOrReferences(in interface{}, context *compiler.Context) (*SchemasOrReferences, error) { + errors := make([]error, 0) + x := &SchemasOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchemaOrReference additional_properties = 1; + // MAP: SchemaOrReference + x.AdditionalProperties = make([]*NamedSchemaOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSchemaOrReference{} + pair.Name = k + var err error + pair.Value, err = NewSchemaOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityRequirement creates an object of type SecurityRequirement if possible, returning an error if not. +func NewSecurityRequirement(in interface{}, context *compiler.Context) (*SecurityRequirement, error) { + errors := make([]error, 0) + x := &SecurityRequirement{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern4} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityScheme creates an object of type SecurityScheme if possible, returning an error if not. 
+func NewSecurityScheme(in interface{}, context *compiler.Context) (*SecurityScheme, error) { + errors := make([]error, 0) + x := &SecurityScheme{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"bearerFormat", "description", "flows", "in", "name", "openIdConnectUrl", "scheme", "type"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 3; + v3 := compiler.MapValueForKey(m, "name") + if v3 != nil { + x.Name, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 4; + v4 := compiler.MapValueForKey(m, "in") + if v4 != nil { + x.In, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string scheme = 5; + v5 := compiler.MapValueForKey(m, "scheme") + if v5 != nil { + x.Scheme, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for scheme: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string bearer_format = 6; + v6 := compiler.MapValueForKey(m, "bearerFormat") + if v6 != nil { + x.BearerFormat, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for bearerFormat: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // OauthFlows flows = 7; + v7 := compiler.MapValueForKey(m, "flows") + if v7 != nil { + var err error + x.Flows, err = NewOauthFlows(v7, compiler.NewContext("flows", context)) + if err != nil { + errors = append(errors, err) + } + } + // string open_id_connect_url = 8; + v8 := compiler.MapValueForKey(m, "openIdConnectUrl") + if v8 != nil { + x.OpenIdConnectUrl, ok = v8.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for openIdConnectUrl: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 9; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value 
+ if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecuritySchemeOrReference creates an object of type SecuritySchemeOrReference if possible, returning an error if not. +func NewSecuritySchemeOrReference(in interface{}, context *compiler.Context) (*SecuritySchemeOrReference, error) { + errors := make([]error, 0) + x := &SecuritySchemeOrReference{} + matched := false + // SecurityScheme security_scheme = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSecurityScheme(m, compiler.NewContext("securityScheme", context)) + if matchingError == nil { + x.Oneof = &SecuritySchemeOrReference_SecurityScheme{SecurityScheme: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", context)) + if matchingError == nil { + x.Oneof = &SecuritySchemeOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecuritySchemesOrReferences creates an object of type SecuritySchemesOrReferences if possible, returning an error if not. +func NewSecuritySchemesOrReferences(in interface{}, context *compiler.Context) (*SecuritySchemesOrReferences, error) { + errors := make([]error, 0) + x := &SecuritySchemesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSecuritySchemeOrReference additional_properties = 1; + // MAP: SecuritySchemeOrReference + x.AdditionalProperties = make([]*NamedSecuritySchemeOrReference, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSecuritySchemeOrReference{} + pair.Name = k + var err error + pair.Value, err = NewSecuritySchemeOrReference(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewServer creates an object of type Server if possible, returning an error if not. 
+func NewServer(in interface{}, context *compiler.Context) (*Server, error) { + errors := make([]error, 0) + x := &Server{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url", "variables"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string url = 1; + v1 := compiler.MapValueForKey(m, "url") + if v1 != nil { + x.Url, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ServerVariables variables = 3; + v3 := compiler.MapValueForKey(m, "variables") + if v3 != nil { + var err error + x.Variables, err = NewServerVariables(v3, compiler.NewContext("variables", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewServerVariable creates an object of type ServerVariable if possible, returning an error if not. 
+func NewServerVariable(in interface{}, context *compiler.Context) (*ServerVariable, error) { + errors := make([]error, 0) + x := &ServerVariable{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"default"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"default", "description", "enum"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string enum = 1; + v1 := compiler.MapValueForKey(m, "enum") + if v1 != nil { + v, ok := v1.([]interface{}) + if ok { + x.Enum = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for enum: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string default = 2; + v2 := compiler.MapValueForKey(m, "default") + if v2 != nil { + x.Default, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for default: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewServerVariables creates an object of type ServerVariables if possible, returning an error if not. 
+func NewServerVariables(in interface{}, context *compiler.Context) (*ServerVariables, error) { + errors := make([]error, 0) + x := &ServerVariables{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedServerVariable additional_properties = 1; + // MAP: ServerVariable + x.AdditionalProperties = make([]*NamedServerVariable, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedServerVariable{} + pair.Name = k + var err error + pair.Value, err = NewServerVariable(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSpecificationExtension creates an object of type SpecificationExtension if possible, returning an error if not. +func NewSpecificationExtension(in interface{}, context *compiler.Context) (*SpecificationExtension, error) { + errors := make([]error, 0) + x := &SpecificationExtension{} + matched := false + switch in := in.(type) { + case bool: + x.Oneof = &SpecificationExtension_Boolean{Boolean: in} + matched = true + case string: + x.Oneof = &SpecificationExtension_String_{String_: in} + matched = true + case int64: + x.Oneof = &SpecificationExtension_Number{Number: float64(in)} + matched = true + case int32: + x.Oneof = &SpecificationExtension_Number{Number: float64(in)} + matched = true + case int: + x.Oneof = &SpecificationExtension_Number{Number: float64(in)} + matched = true + case float64: + x.Oneof = &SpecificationExtension_Number{Number: in} + matched = true + case float32: + x.Oneof = &SpecificationExtension_Number{Number: float64(in)} + matched = true + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStringArray creates an object of type StringArray if possible, returning an error if not. +func NewStringArray(in interface{}, context *compiler.Context) (*StringArray, error) { + errors := make([]error, 0) + x := &StringArray{} + a, ok := in.([]interface{}) + if !ok { + message := fmt.Sprintf("has unexpected value for StringArray: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.Value = make([]string, 0) + for _, s := range a { + x.Value = append(x.Value, s.(string)) + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStrings creates an object of type Strings if possible, returning an error if not. +func NewStrings(in interface{}, context *compiler.Context) (*Strings, error) { + errors := make([]error, 0) + x := &Strings{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedString additional_properties = 1; + // MAP: string + x.AdditionalProperties = make([]*NamedString, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedString{} + pair.Name = k + pair.Value = v.(string) + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTag creates an object of type Tag if possible, returning an error if not. 
+func NewTag(in interface{}, context *compiler.Context) (*Tag, error) { + errors := make([]error, 0) + x := &Tag{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "externalDocs", "name"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 3; + v3 := compiler.MapValueForKey(m, "externalDocs") + if v3 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v3, compiler.NewContext("externalDocs", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewXml creates an object of type Xml if possible, returning an error if not. 
+func NewXml(in interface{}, context *compiler.Context) (*Xml, error) { + errors := make([]error, 0) + x := &Xml{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"attribute", "name", "namespace", "prefix", "wrapped"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string namespace = 2; + v2 := compiler.MapValueForKey(m, "namespace") + if v2 != nil { + x.Namespace, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for namespace: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string prefix = 3; + v3 := compiler.MapValueForKey(m, "prefix") + if v3 != nil { + x.Prefix, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for prefix: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool attribute = 4; + v4 := compiler.MapValueForKey(m, "attribute") + if v4 != nil { + x.Attribute, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for attribute: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool wrapped = 5; + v5 := compiler.MapValueForKey(m, "wrapped") + if v5 != nil { + x.Wrapped, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for wrapped: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 6; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.HandleExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes, _ := yaml.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside AdditionalPropertiesItem objects. +func (m *AdditionalPropertiesItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*AdditionalPropertiesItem_SchemaOrReference) + if ok { + _, err := p.SchemaOrReference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Any objects. 
+func (m *Any) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside AnyOrExpression objects. +func (m *AnyOrExpression) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*AnyOrExpression_Any) + if ok { + _, err := p.Any.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*AnyOrExpression_Expression) + if ok { + _, err := p.Expression.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside AnysOrExpressions objects. +func (m *AnysOrExpressions) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Callback objects. +func (m *Callback) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.Path { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside CallbackOrReference objects. +func (m *CallbackOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*CallbackOrReference_Callback) + if ok { + _, err := p.Callback.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*CallbackOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside CallbacksOrReferences objects. +func (m *CallbacksOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Components objects. 
+func (m *Components) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Schemas != nil { + _, err := m.Schemas.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.RequestBodies != nil { + _, err := m.RequestBodies.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.SecuritySchemes != nil { + _, err := m.SecuritySchemes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Links != nil { + _, err := m.Links.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Callbacks != nil { + _, err := m.Callbacks.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Contact objects. +func (m *Contact) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside DefaultType objects. +func (m *DefaultType) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Discriminator objects. +func (m *Discriminator) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Mapping != nil { + _, err := m.Mapping.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Document objects. 
+func (m *Document) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Info != nil { + _, err := m.Info.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Servers { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Paths != nil { + _, err := m.Paths.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Components != nil { + _, err := m.Components.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Tags { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Encoding objects. +func (m *Encoding) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Encodings objects. +func (m *Encodings) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Example objects. +func (m *Example) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExampleOrReference objects. +func (m *ExampleOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ExampleOrReference_Example) + if ok { + _, err := p.Example.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ExampleOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Examples objects. 
+func (m *Examples) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExamplesOrReferences objects. +func (m *ExamplesOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Expression objects. +func (m *Expression) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExternalDocs objects. +func (m *ExternalDocs) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Header objects. +func (m *Header) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeaderOrReference objects. +func (m *HeaderOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*HeaderOrReference_Header) + if ok { + _, err := p.Header.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*HeaderOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeadersOrReferences objects. +func (m *HeadersOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Info objects. 
+func (m *Info) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Contact != nil { + _, err := m.Contact.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.License != nil { + _, err := m.License.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ItemsItem objects. +func (m *ItemsItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.SchemaOrReference { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside License objects. +func (m *License) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Link objects. +func (m *Link) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.RequestBody != nil { + _, err := m.RequestBody.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Server != nil { + _, err := m.Server.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside LinkOrReference objects. +func (m *LinkOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*LinkOrReference_Link) + if ok { + _, err := p.Link.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*LinkOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside LinksOrReferences objects. +func (m *LinksOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside MediaType objects. 
+func (m *MediaType) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Encoding != nil { + _, err := m.Encoding.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside MediaTypes objects. +func (m *MediaTypes) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedAny objects. +func (m *NamedAny) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedAnyOrExpression objects. +func (m *NamedAnyOrExpression) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedCallbackOrReference objects. +func (m *NamedCallbackOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedEncoding objects. +func (m *NamedEncoding) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedExampleOrReference objects. +func (m *NamedExampleOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedHeaderOrReference objects. 
+func (m *NamedHeaderOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedLinkOrReference objects. +func (m *NamedLinkOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedMediaType objects. +func (m *NamedMediaType) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedParameterOrReference objects. +func (m *NamedParameterOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedPathItem objects. +func (m *NamedPathItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedRequestBodyOrReference objects. +func (m *NamedRequestBodyOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResponseOrReference objects. +func (m *NamedResponseOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSchemaOrReference objects. +func (m *NamedSchemaOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSecuritySchemeOrReference objects. +func (m *NamedSecuritySchemeOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedServerVariable objects. 
+func (m *NamedServerVariable) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedString objects. +func (m *NamedString) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside OauthFlow objects. +func (m *OauthFlow) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside OauthFlows objects. +func (m *OauthFlows) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Implicit != nil { + _, err := m.Implicit.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Password != nil { + _, err := m.Password.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ClientCredentials != nil { + _, err := m.ClientCredentials.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.AuthorizationCode != nil { + _, err := m.AuthorizationCode.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Object objects. +func (m *Object) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Operation objects. 
+func (m *Operation) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.RequestBody != nil { + _, err := m.RequestBody.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Callbacks != nil { + _, err := m.Callbacks.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Servers { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameter objects. +func (m *Parameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParameterOrReference objects. +func (m *ParameterOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ParameterOrReference_Parameter) + if ok { + _, err := p.Parameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ParameterOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParametersOrReferences objects. +func (m *ParametersOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathItem objects. 
+func (m *PathItem) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewPathItem(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Get != nil { + _, err := m.Get.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Put != nil { + _, err := m.Put.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Post != nil { + _, err := m.Post.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Delete != nil { + _, err := m.Delete.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Options != nil { + _, err := m.Options.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Head != nil { + _, err := m.Head.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Patch != nil { + _, err := m.Patch.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Trace != nil { + _, err := m.Trace.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Servers { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Paths objects. +func (m *Paths) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.Path { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Properties objects. +func (m *Properties) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Reference objects. +func (m *Reference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + return info, nil + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside RequestBodiesOrReferences objects. 
+func (m *RequestBodiesOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside RequestBody objects. +func (m *RequestBody) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside RequestBodyOrReference objects. +func (m *RequestBodyOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*RequestBodyOrReference_RequestBody) + if ok { + _, err := p.RequestBody.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*RequestBodyOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Response objects. +func (m *Response) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Links != nil { + _, err := m.Links.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseOrReference objects. +func (m *ResponseOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ResponseOrReference_Response) + if ok { + _, err := p.Response.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ResponseOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Responses objects. 
+func (m *Responses) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.ResponseOrReference { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponsesOrReferences objects. +func (m *ResponsesOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schema objects. +func (m *Schema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Discriminator != nil { + _, err := m.Discriminator.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Xml != nil { + _, err := m.Xml.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.AllOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.OneOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.AnyOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Not != nil { + _, err := m.Not.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemaOrReference objects. 
+func (m *SchemaOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SchemaOrReference_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SchemaOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemasOrReferences objects. +func (m *SchemasOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityRequirement objects. +func (m *SecurityRequirement) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityScheme objects. +func (m *SecurityScheme) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Flows != nil { + _, err := m.Flows.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecuritySchemeOrReference objects. +func (m *SecuritySchemeOrReference) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SecuritySchemeOrReference_SecurityScheme) + if ok { + _, err := p.SecurityScheme.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecuritySchemeOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecuritySchemesOrReferences objects. +func (m *SecuritySchemesOrReferences) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Server objects. +func (m *Server) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Variables != nil { + _, err := m.Variables.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ServerVariable objects. 
+func (m *ServerVariable) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ServerVariables objects. +func (m *ServerVariables) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SpecificationExtension objects. +func (m *SpecificationExtension) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside StringArray objects. +func (m *StringArray) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Strings objects. +func (m *Strings) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Tag objects. +func (m *Tag) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Xml objects. +func (m *Xml) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. +func (m *AdditionalPropertiesItem) ToRawInfo() interface{} { + // ONE OF WRAPPER + // AdditionalPropertiesItem + // {Name:schemaOrReference Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchemaOrReference() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return v1.Boolean + } + return nil +} + +// ToRawInfo returns a description of Any suitable for JSON or YAML export. 
+func (m *Any) ToRawInfo() interface{} { + var err error + var info1 []yaml.MapSlice + err = yaml.Unmarshal([]byte(m.Yaml), &info1) + if err == nil { + return info1 + } + var info2 yaml.MapSlice + err = yaml.Unmarshal([]byte(m.Yaml), &info2) + if err == nil { + return info2 + } + var info3 interface{} + err = yaml.Unmarshal([]byte(m.Yaml), &info3) + if err == nil { + return info3 + } + return nil +} + +// ToRawInfo returns a description of AnyOrExpression suitable for JSON or YAML export. +func (m *AnyOrExpression) ToRawInfo() interface{} { + // ONE OF WRAPPER + // AnyOrExpression + // {Name:any Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetAny() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:expression Type:Expression StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetExpression() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of AnysOrExpressions suitable for JSON or YAML export. +func (m *AnysOrExpressions) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedAnyOrExpression StringEnumValues:[] MapType:AnyOrExpression Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Callback suitable for JSON or YAML export. +func (m *Callback) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Path != nil { + for _, item := range m.Path { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:Path Type:NamedPathItem StringEnumValues:[] MapType:PathItem Repeated:true Pattern:^ Implicit:true Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of CallbackOrReference suitable for JSON or YAML export. +func (m *CallbackOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // CallbackOrReference + // {Name:callback Type:Callback StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetCallback() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of CallbacksOrReferences suitable for JSON or YAML export. +func (m *CallbacksOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedCallbackOrReference StringEnumValues:[] MapType:CallbackOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Components suitable for JSON or YAML export. 
+func (m *Components) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Schemas != nil { + info = append(info, yaml.MapItem{"schemas", m.Schemas.ToRawInfo()}) + } + // &{Name:schemas Type:SchemasOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Responses != nil { + info = append(info, yaml.MapItem{"responses", m.Responses.ToRawInfo()}) + } + // &{Name:responses Type:ResponsesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Parameters != nil { + info = append(info, yaml.MapItem{"parameters", m.Parameters.ToRawInfo()}) + } + // &{Name:parameters Type:ParametersOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Examples != nil { + info = append(info, yaml.MapItem{"examples", m.Examples.ToRawInfo()}) + } + // &{Name:examples Type:ExamplesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.RequestBodies != nil { + info = append(info, yaml.MapItem{"requestBodies", m.RequestBodies.ToRawInfo()}) + } + // &{Name:requestBodies Type:RequestBodiesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Headers != nil { + info = append(info, yaml.MapItem{"headers", m.Headers.ToRawInfo()}) + } + // &{Name:headers Type:HeadersOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SecuritySchemes != nil { + info = append(info, yaml.MapItem{"securitySchemes", m.SecuritySchemes.ToRawInfo()}) + } + // &{Name:securitySchemes Type:SecuritySchemesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Links != nil { + info = append(info, yaml.MapItem{"links", m.Links.ToRawInfo()}) + } + // &{Name:links Type:LinksOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Callbacks != nil { + info = append(info, yaml.MapItem{"callbacks", m.Callbacks.ToRawInfo()}) + } + // &{Name:callbacks Type:CallbacksOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Contact suitable for JSON or YAML export. +func (m *Contact) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Url != "" { + info = append(info, yaml.MapItem{"url", m.Url}) + } + if m.Email != "" { + info = append(info, yaml.MapItem{"email", m.Email}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of DefaultType suitable for JSON or YAML export. 
+func (m *DefaultType) ToRawInfo() interface{} { + // ONE OF WRAPPER + // DefaultType + // {Name:number Type:float StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v0, ok := m.GetOneof().(*DefaultType_Number); ok { + return v0.Number + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*DefaultType_Boolean); ok { + return v1.Boolean + } + // {Name:string Type:string StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v2, ok := m.GetOneof().(*DefaultType_String_); ok { + return v2.String_ + } + return nil +} + +// ToRawInfo returns a description of Discriminator suitable for JSON or YAML export. +func (m *Discriminator) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.PropertyName != "" { + info = append(info, yaml.MapItem{"propertyName", m.PropertyName}) + } + if m.Mapping != nil { + info = append(info, yaml.MapItem{"mapping", m.Mapping.ToRawInfo()}) + } + // &{Name:mapping Type:Strings StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Document suitable for JSON or YAML export. +func (m *Document) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Openapi != "" { + info = append(info, yaml.MapItem{"openapi", m.Openapi}) + } + if m.Info != nil { + info = append(info, yaml.MapItem{"info", m.Info.ToRawInfo()}) + } + // &{Name:info Type:Info StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Servers) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Servers { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"servers", items}) + } + // &{Name:servers Type:Server StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.Paths != nil { + info = append(info, yaml.MapItem{"paths", m.Paths.ToRawInfo()}) + } + // &{Name:paths Type:Paths StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Components != nil { + info = append(info, yaml.MapItem{"components", m.Components.ToRawInfo()}) + } + // &{Name:components Type:Components StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Security) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Security { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"security", items}) + } + // &{Name:security Type:SecurityRequirement StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if len(m.Tags) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Tags { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"tags", items}) + } + // &{Name:tags Type:Tag StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} +
+// ToRawInfo returns a description of Encoding suitable for JSON or YAML export. +func (m *Encoding) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.ContentType != "" { + info = append(info, yaml.MapItem{"contentType", m.ContentType}) + } + if m.Headers != nil { + info = append(info, yaml.MapItem{"headers", m.Headers.ToRawInfo()}) + } + // &{Name:headers Type:HeadersOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Style != "" { + info = append(info, yaml.MapItem{"style", m.Style}) + } + if m.Explode != false { + info = append(info, yaml.MapItem{"explode", m.Explode}) + } + if m.AllowReserved != false { + info = append(info, yaml.MapItem{"allowReserved", m.AllowReserved}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Encodings suitable for JSON or YAML export. +func (m *Encodings) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedEncoding StringEnumValues:[] MapType:Encoding Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Example suitable for JSON or YAML export. +func (m *Example) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Summary != "" { + info = append(info, yaml.MapItem{"summary", m.Summary}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + // &{Name:value Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ExternalValue != "" { + info = append(info, yaml.MapItem{"externalValue", m.ExternalValue}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ExampleOrReference suitable for JSON or YAML export. +func (m *ExampleOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // ExampleOrReference + // {Name:example Type:Example StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetExample() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of Examples suitable for JSON or YAML export. +func (m *Examples) ToRawInfo() interface{} { + info := yaml.MapSlice{} + return info +} + +// ToRawInfo returns a description of ExamplesOrReferences suitable for JSON or YAML export.
+func (m *ExamplesOrReferences) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.AdditionalProperties != nil {
+    for _, item := range m.AdditionalProperties {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:additionalProperties Type:NamedExampleOrReference StringEnumValues:[] MapType:ExampleOrReference Repeated:true Pattern: Implicit:true Description:}
+  return info
+}
+
+// ToRawInfo returns a description of Expression suitable for JSON or YAML export.
+func (m *Expression) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.AdditionalProperties != nil {
+    for _, item := range m.AdditionalProperties {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:additionalProperties Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern: Implicit:true Description:}
+  return info
+}
+
+// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export.
+func (m *ExternalDocs) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.Description != "" {
+    info = append(info, yaml.MapItem{"description", m.Description})
+  }
+  if m.Url != "" {
+    info = append(info, yaml.MapItem{"url", m.Url})
+  }
+  if m.SpecificationExtension != nil {
+    for _, item := range m.SpecificationExtension {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:}
+  return info
+}
+
+// ToRawInfo returns a description of Header suitable for JSON or YAML export.
+func (m *Header) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.Description != "" {
+    info = append(info, yaml.MapItem{"description", m.Description})
+  }
+  if m.Required != false {
+    info = append(info, yaml.MapItem{"required", m.Required})
+  }
+  if m.Deprecated != false {
+    info = append(info, yaml.MapItem{"deprecated", m.Deprecated})
+  }
+  if m.AllowEmptyValue != false {
+    info = append(info, yaml.MapItem{"allowEmptyValue", m.AllowEmptyValue})
+  }
+  if m.Style != "" {
+    info = append(info, yaml.MapItem{"style", m.Style})
+  }
+  if m.Explode != false {
+    info = append(info, yaml.MapItem{"explode", m.Explode})
+  }
+  if m.AllowReserved != false {
+    info = append(info, yaml.MapItem{"allowReserved", m.AllowReserved})
+  }
+  if m.Schema != nil {
+    info = append(info, yaml.MapItem{"schema", m.Schema.ToRawInfo()})
+  }
+  // &{Name:schema Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  if m.Example != nil {
+    info = append(info, yaml.MapItem{"example", m.Example.ToRawInfo()})
+  }
+  // &{Name:example Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  if m.Examples != nil {
+    info = append(info, yaml.MapItem{"examples", m.Examples.ToRawInfo()})
+  }
+  // &{Name:examples Type:ExamplesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  if m.Content != nil {
+    info = append(info, yaml.MapItem{"content", m.Content.ToRawInfo()})
+  }
+  // &{Name:content Type:MediaTypes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  if m.SpecificationExtension != nil {
+    for _, item := range m.SpecificationExtension {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:}
+  return info
+}
+
+// ToRawInfo returns a description of HeaderOrReference suitable for JSON or YAML export.
+func (m *HeaderOrReference) ToRawInfo() interface{} {
+  // ONE OF WRAPPER
+  // HeaderOrReference
+  // {Name:header Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  v0 := m.GetHeader()
+  if v0 != nil {
+    return v0.ToRawInfo()
+  }
+  // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  v1 := m.GetReference()
+  if v1 != nil {
+    return v1.ToRawInfo()
+  }
+  return nil
+}
+
+// ToRawInfo returns a description of HeadersOrReferences suitable for JSON or YAML export.
+func (m *HeadersOrReferences) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.AdditionalProperties != nil {
+    for _, item := range m.AdditionalProperties {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:additionalProperties Type:NamedHeaderOrReference StringEnumValues:[] MapType:HeaderOrReference Repeated:true Pattern: Implicit:true Description:}
+  return info
+}
+
+// ToRawInfo returns a description of Info suitable for JSON or YAML export.
+func (m *Info) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.Title != "" {
+    info = append(info, yaml.MapItem{"title", m.Title})
+  }
+  if m.Description != "" {
+    info = append(info, yaml.MapItem{"description", m.Description})
+  }
+  if m.TermsOfService != "" {
+    info = append(info, yaml.MapItem{"termsOfService", m.TermsOfService})
+  }
+  if m.Contact != nil {
+    info = append(info, yaml.MapItem{"contact", m.Contact.ToRawInfo()})
+  }
+  // &{Name:contact Type:Contact StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  if m.License != nil {
+    info = append(info, yaml.MapItem{"license", m.License.ToRawInfo()})
+  }
+  // &{Name:license Type:License StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
+  if m.Version != "" {
+    info = append(info, yaml.MapItem{"version", m.Version})
+  }
+  if m.SpecificationExtension != nil {
+    for _, item := range m.SpecificationExtension {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:}
+  return info
+}
+
+// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export.
+func (m *ItemsItem) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if len(m.SchemaOrReference) != 0 {
+    items := make([]interface{}, 0)
+    for _, item := range m.SchemaOrReference {
+      items = append(items, item.ToRawInfo())
+    }
+    info = append(info, yaml.MapItem{"schemaOrReference", items})
+  }
+  // &{Name:schemaOrReference Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:}
+  return info
+}
+
+// ToRawInfo returns a description of License suitable for JSON or YAML export.
+func (m *License) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Url != "" { + info = append(info, yaml.MapItem{"url", m.Url}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Link suitable for JSON or YAML export. +func (m *Link) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.OperationRef != "" { + info = append(info, yaml.MapItem{"operationRef", m.OperationRef}) + } + if m.OperationId != "" { + info = append(info, yaml.MapItem{"operationId", m.OperationId}) + } + if m.Parameters != nil { + info = append(info, yaml.MapItem{"parameters", m.Parameters.ToRawInfo()}) + } + // &{Name:parameters Type:AnysOrExpressions StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.RequestBody != nil { + info = append(info, yaml.MapItem{"requestBody", m.RequestBody.ToRawInfo()}) + } + // &{Name:requestBody Type:AnyOrExpression StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Server != nil { + info = append(info, yaml.MapItem{"server", m.Server.ToRawInfo()}) + } + // &{Name:server Type:Server StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of LinkOrReference suitable for JSON or YAML export. +func (m *LinkOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // LinkOrReference + // {Name:link Type:Link StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetLink() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of LinksOrReferences suitable for JSON or YAML export. +func (m *LinksOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedLinkOrReference StringEnumValues:[] MapType:LinkOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of MediaType suitable for JSON or YAML export. 
+func (m *MediaType) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Schema != nil { + info = append(info, yaml.MapItem{"schema", m.Schema.ToRawInfo()}) + } + // &{Name:schema Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Example != nil { + info = append(info, yaml.MapItem{"example", m.Example.ToRawInfo()}) + } + // &{Name:example Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Examples != nil { + info = append(info, yaml.MapItem{"examples", m.Examples.ToRawInfo()}) + } + // &{Name:examples Type:ExamplesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Encoding != nil { + info = append(info, yaml.MapItem{"encoding", m.Encoding.ToRawInfo()}) + } + // &{Name:encoding Type:Encodings StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of MediaTypes suitable for JSON or YAML export. +func (m *MediaTypes) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedMediaType StringEnumValues:[] MapType:MediaType Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. +func (m *NamedAny) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedAnyOrExpression suitable for JSON or YAML export. +func (m *NamedAnyOrExpression) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:AnyOrExpression StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedCallbackOrReference suitable for JSON or YAML export. +func (m *NamedCallbackOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:CallbackOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedEncoding suitable for JSON or YAML export. +func (m *NamedEncoding) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Encoding StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedExampleOrReference suitable for JSON or YAML export. 
+func (m *NamedExampleOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:ExampleOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedHeaderOrReference suitable for JSON or YAML export. +func (m *NamedHeaderOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:HeaderOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedLinkOrReference suitable for JSON or YAML export. +func (m *NamedLinkOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:LinkOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedMediaType suitable for JSON or YAML export. +func (m *NamedMediaType) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:MediaType StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedParameterOrReference suitable for JSON or YAML export. +func (m *NamedParameterOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:ParameterOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. +func (m *NamedPathItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedRequestBodyOrReference suitable for JSON or YAML export. +func (m *NamedRequestBodyOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:RequestBodyOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponseOrReference suitable for JSON or YAML export. +func (m *NamedResponseOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:ResponseOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSchemaOrReference suitable for JSON or YAML export. 
+func (m *NamedSchemaOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSecuritySchemeOrReference suitable for JSON or YAML export. +func (m *NamedSecuritySchemeOrReference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:SecuritySchemeOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedServerVariable suitable for JSON or YAML export. +func (m *NamedServerVariable) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:ServerVariable StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. +func (m *NamedString) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Value != "" { + info = append(info, yaml.MapItem{"value", m.Value}) + } + return info +} + +// ToRawInfo returns a description of OauthFlow suitable for JSON or YAML export. +func (m *OauthFlow) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AuthorizationUrl != "" { + info = append(info, yaml.MapItem{"authorizationUrl", m.AuthorizationUrl}) + } + if m.TokenUrl != "" { + info = append(info, yaml.MapItem{"tokenUrl", m.TokenUrl}) + } + if m.RefreshUrl != "" { + info = append(info, yaml.MapItem{"refreshUrl", m.RefreshUrl}) + } + if m.Scopes != nil { + info = append(info, yaml.MapItem{"scopes", m.Scopes.ToRawInfo()}) + } + // &{Name:scopes Type:Strings StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of OauthFlows suitable for JSON or YAML export. 
+func (m *OauthFlows) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Implicit != nil { + info = append(info, yaml.MapItem{"implicit", m.Implicit.ToRawInfo()}) + } + // &{Name:implicit Type:OauthFlow StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Password != nil { + info = append(info, yaml.MapItem{"password", m.Password.ToRawInfo()}) + } + // &{Name:password Type:OauthFlow StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ClientCredentials != nil { + info = append(info, yaml.MapItem{"clientCredentials", m.ClientCredentials.ToRawInfo()}) + } + // &{Name:clientCredentials Type:OauthFlow StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.AuthorizationCode != nil { + info = append(info, yaml.MapItem{"authorizationCode", m.AuthorizationCode.ToRawInfo()}) + } + // &{Name:authorizationCode Type:OauthFlow StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Object suitable for JSON or YAML export. +func (m *Object) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
+func (m *Operation) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Tags) != 0 { + info = append(info, yaml.MapItem{"tags", m.Tags}) + } + if m.Summary != "" { + info = append(info, yaml.MapItem{"summary", m.Summary}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.OperationId != "" { + info = append(info, yaml.MapItem{"operationId", m.OperationId}) + } + if len(m.Parameters) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Parameters { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"parameters", items}) + } + // &{Name:parameters Type:ParameterOrReference StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.RequestBody != nil { + info = append(info, yaml.MapItem{"requestBody", m.RequestBody.ToRawInfo()}) + } + // &{Name:requestBody Type:RequestBodyOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Responses != nil { + info = append(info, yaml.MapItem{"responses", m.Responses.ToRawInfo()}) + } + // &{Name:responses Type:Responses StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Callbacks != nil { + info = append(info, yaml.MapItem{"callbacks", m.Callbacks.ToRawInfo()}) + } + // &{Name:callbacks Type:CallbacksOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Deprecated != false { + info = append(info, yaml.MapItem{"deprecated", m.Deprecated}) + } + if len(m.Security) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Security { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"security", items}) + } + // &{Name:security Type:SecurityRequirement StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if len(m.Servers) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Servers { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"servers", items}) + } + // &{Name:servers Type:Server StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. 
+func (m *Parameter) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.Deprecated != false { + info = append(info, yaml.MapItem{"deprecated", m.Deprecated}) + } + if m.AllowEmptyValue != false { + info = append(info, yaml.MapItem{"allowEmptyValue", m.AllowEmptyValue}) + } + if m.Style != "" { + info = append(info, yaml.MapItem{"style", m.Style}) + } + if m.Explode != false { + info = append(info, yaml.MapItem{"explode", m.Explode}) + } + if m.AllowReserved != false { + info = append(info, yaml.MapItem{"allowReserved", m.AllowReserved}) + } + if m.Schema != nil { + info = append(info, yaml.MapItem{"schema", m.Schema.ToRawInfo()}) + } + // &{Name:schema Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Example != nil { + info = append(info, yaml.MapItem{"example", m.Example.ToRawInfo()}) + } + // &{Name:example Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Examples != nil { + info = append(info, yaml.MapItem{"examples", m.Examples.ToRawInfo()}) + } + // &{Name:examples Type:ExamplesOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Content != nil { + info = append(info, yaml.MapItem{"content", m.Content.ToRawInfo()}) + } + // &{Name:content Type:MediaTypes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ParameterOrReference suitable for JSON or YAML export. +func (m *ParameterOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // ParameterOrReference + // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of ParametersOrReferences suitable for JSON or YAML export. +func (m *ParametersOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedParameterOrReference StringEnumValues:[] MapType:ParameterOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. 
+func (m *PathItem) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.Summary != "" { + info = append(info, yaml.MapItem{"summary", m.Summary}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Get != nil { + info = append(info, yaml.MapItem{"get", m.Get.ToRawInfo()}) + } + // &{Name:get Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Put != nil { + info = append(info, yaml.MapItem{"put", m.Put.ToRawInfo()}) + } + // &{Name:put Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Post != nil { + info = append(info, yaml.MapItem{"post", m.Post.ToRawInfo()}) + } + // &{Name:post Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Delete != nil { + info = append(info, yaml.MapItem{"delete", m.Delete.ToRawInfo()}) + } + // &{Name:delete Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Options != nil { + info = append(info, yaml.MapItem{"options", m.Options.ToRawInfo()}) + } + // &{Name:options Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Head != nil { + info = append(info, yaml.MapItem{"head", m.Head.ToRawInfo()}) + } + // &{Name:head Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Patch != nil { + info = append(info, yaml.MapItem{"patch", m.Patch.ToRawInfo()}) + } + // &{Name:patch Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Trace != nil { + info = append(info, yaml.MapItem{"trace", m.Trace.ToRawInfo()}) + } + // &{Name:trace Type:Operation StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Servers) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Servers { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"servers", items}) + } + // &{Name:servers Type:Server StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if len(m.Parameters) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Parameters { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"parameters", items}) + } + // &{Name:parameters Type:ParameterOrReference StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Paths suitable for JSON or YAML export. 
+func (m *Paths) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Path != nil { + for _, item := range m.Path { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:Path Type:NamedPathItem StringEnumValues:[] MapType:PathItem Repeated:true Pattern:^/ Implicit:true Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Properties suitable for JSON or YAML export. +func (m *Properties) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSchemaOrReference StringEnumValues:[] MapType:SchemaOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Reference suitable for JSON or YAML export. +func (m *Reference) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + return info +} + +// ToRawInfo returns a description of RequestBodiesOrReferences suitable for JSON or YAML export. +func (m *RequestBodiesOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedRequestBodyOrReference StringEnumValues:[] MapType:RequestBodyOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of RequestBody suitable for JSON or YAML export. +func (m *RequestBody) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Content != nil { + info = append(info, yaml.MapItem{"content", m.Content.ToRawInfo()}) + } + // &{Name:content Type:MediaTypes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of RequestBodyOrReference suitable for JSON or YAML export. +func (m *RequestBodyOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // RequestBodyOrReference + // {Name:requestBody Type:RequestBody StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetRequestBody() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of Response suitable for JSON or YAML export. 
+func (m *Response) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Headers != nil { + info = append(info, yaml.MapItem{"headers", m.Headers.ToRawInfo()}) + } + // &{Name:headers Type:HeadersOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Content != nil { + info = append(info, yaml.MapItem{"content", m.Content.ToRawInfo()}) + } + // &{Name:content Type:MediaTypes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Links != nil { + info = append(info, yaml.MapItem{"links", m.Links.ToRawInfo()}) + } + // &{Name:links Type:LinksOrReferences StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ResponseOrReference suitable for JSON or YAML export. +func (m *ResponseOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // ResponseOrReference + // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetResponse() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of Responses suitable for JSON or YAML export. +func (m *Responses) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:ResponseOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ResponseOrReference != nil { + for _, item := range m.ResponseOrReference { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:ResponseOrReference Type:NamedResponseOrReference StringEnumValues:[] MapType:ResponseOrReference Repeated:true Pattern:^([0-9X]{3})$ Implicit:true Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ResponsesOrReferences suitable for JSON or YAML export. +func (m *ResponsesOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedResponseOrReference StringEnumValues:[] MapType:ResponseOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Schema suitable for JSON or YAML export. 
+func (m *Schema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Nullable != false { + info = append(info, yaml.MapItem{"nullable", m.Nullable}) + } + if m.Discriminator != nil { + info = append(info, yaml.MapItem{"discriminator", m.Discriminator.ToRawInfo()}) + } + // &{Name:discriminator Type:Discriminator StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ReadOnly != false { + info = append(info, yaml.MapItem{"readOnly", m.ReadOnly}) + } + if m.WriteOnly != false { + info = append(info, yaml.MapItem{"writeOnly", m.WriteOnly}) + } + if m.Xml != nil { + info = append(info, yaml.MapItem{"xml", m.Xml.ToRawInfo()}) + } + // &{Name:xml Type:Xml StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Example != nil { + info = append(info, yaml.MapItem{"example", m.Example.ToRawInfo()}) + } + // &{Name:example Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Deprecated != false { + info = append(info, yaml.MapItem{"deprecated", m.Deprecated}) + } + if m.Title != "" { + info = append(info, yaml.MapItem{"title", m.Title}) + } + if m.MultipleOf != 0.0 { + info = append(info, yaml.MapItem{"multipleOf", m.MultipleOf}) + } + if m.Maximum != 0.0 { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if m.ExclusiveMaximum != false { + info = append(info, yaml.MapItem{"exclusiveMaximum", m.ExclusiveMaximum}) + } + if m.Minimum != 0.0 { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.ExclusiveMinimum != false { + info = append(info, yaml.MapItem{"exclusiveMinimum", m.ExclusiveMinimum}) + } + if m.MaxLength != 0 { + info = append(info, yaml.MapItem{"maxLength", m.MaxLength}) + } + if m.MinLength != 0 { + info = append(info, yaml.MapItem{"minLength", m.MinLength}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.MaxItems != 0 { + info = append(info, yaml.MapItem{"maxItems", m.MaxItems}) + } + if m.MinItems != 0 { + info = append(info, yaml.MapItem{"minItems", m.MinItems}) + } + if m.UniqueItems != false { + info = append(info, yaml.MapItem{"uniqueItems", m.UniqueItems}) + } + if m.MaxProperties != 0 { + info = append(info, yaml.MapItem{"maxProperties", m.MaxProperties}) + } + if m.MinProperties != 0 { + info = append(info, yaml.MapItem{"minProperties", m.MinProperties}) + } + if len(m.Required) != 0 { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if len(m.Enum) != 0 { + items := make([]interface{}, 0) + for _, item := range m.Enum { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"enum", items}) + } + // &{Name:enum Type:Any StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if len(m.AllOf) != 0 { + items := make([]interface{}, 0) + for _, item := range m.AllOf { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"allOf", items}) + } + // &{Name:allOf Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if len(m.OneOf) != 0 { + items := make([]interface{}, 0) + for _, item := range m.OneOf { + items = append(items, 
item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"oneOf", items}) + } + // &{Name:oneOf Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if len(m.AnyOf) != 0 { + items := make([]interface{}, 0) + for _, item := range m.AnyOf { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"anyOf", items}) + } + // &{Name:anyOf Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:} + if m.Not != nil { + info = append(info, yaml.MapItem{"not", m.Not.ToRawInfo()}) + } + // &{Name:not Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Items != nil { + items := make([]interface{}, 0) + for _, item := range m.Items.SchemaOrReference { + items = append(items, item.ToRawInfo()) + } + info = append(info, yaml.MapItem{"items", items[0]}) + } + // &{Name:items Type:ItemsItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Properties != nil { + info = append(info, yaml.MapItem{"properties", m.Properties.ToRawInfo()}) + } + // &{Name:properties Type:Properties StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.AdditionalProperties != nil { + info = append(info, yaml.MapItem{"additionalProperties", m.AdditionalProperties.ToRawInfo()}) + } + // &{Name:additionalProperties Type:AdditionalPropertiesItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Default != nil { + info = append(info, yaml.MapItem{"default", m.Default.ToRawInfo()}) + } + // &{Name:default Type:DefaultType StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of SchemaOrReference suitable for JSON or YAML export. +func (m *SchemaOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // SchemaOrReference + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of SchemasOrReferences suitable for JSON or YAML export. +func (m *SchemasOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSchemaOrReference StringEnumValues:[] MapType:SchemaOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. 
+func (m *SecurityRequirement) ToRawInfo() interface{} { + info := yaml.MapSlice{} + return info +} + +// ToRawInfo returns a description of SecurityScheme suitable for JSON or YAML export. +func (m *SecurityScheme) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.In != "" { + info = append(info, yaml.MapItem{"in", m.In}) + } + if m.Scheme != "" { + info = append(info, yaml.MapItem{"scheme", m.Scheme}) + } + if m.BearerFormat != "" { + info = append(info, yaml.MapItem{"bearerFormat", m.BearerFormat}) + } + if m.Flows != nil { + info = append(info, yaml.MapItem{"flows", m.Flows.ToRawInfo()}) + } + // &{Name:flows Type:OauthFlows StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.OpenIdConnectUrl != "" { + info = append(info, yaml.MapItem{"openIdConnectUrl", m.OpenIdConnectUrl}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of SecuritySchemeOrReference suitable for JSON or YAML export. +func (m *SecuritySchemeOrReference) ToRawInfo() interface{} { + // ONE OF WRAPPER + // SecuritySchemeOrReference + // {Name:securityScheme Type:SecurityScheme StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSecurityScheme() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return nil +} + +// ToRawInfo returns a description of SecuritySchemesOrReferences suitable for JSON or YAML export. +func (m *SecuritySchemesOrReferences) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSecuritySchemeOrReference StringEnumValues:[] MapType:SecuritySchemeOrReference Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Server suitable for JSON or YAML export. +func (m *Server) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Url != "" { + info = append(info, yaml.MapItem{"url", m.Url}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Variables != nil { + info = append(info, yaml.MapItem{"variables", m.Variables.ToRawInfo()}) + } + // &{Name:variables Type:ServerVariables StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ServerVariable suitable for JSON or YAML export. 
+func (m *ServerVariable) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Enum) != 0 { + info = append(info, yaml.MapItem{"enum", m.Enum}) + } + if m.Default != "" { + info = append(info, yaml.MapItem{"default", m.Default}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of ServerVariables suitable for JSON or YAML export. +func (m *ServerVariables) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedServerVariable StringEnumValues:[] MapType:ServerVariable Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of SpecificationExtension suitable for JSON or YAML export. +func (m *SpecificationExtension) ToRawInfo() interface{} { + // ONE OF WRAPPER + // SpecificationExtension + // {Name:number Type:float StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v0, ok := m.GetOneof().(*SpecificationExtension_Number); ok { + return v0.Number + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*SpecificationExtension_Boolean); ok { + return v1.Boolean + } + // {Name:string Type:string StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v2, ok := m.GetOneof().(*SpecificationExtension_String_); ok { + return v2.String_ + } + return nil +} + +// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. +func (m *StringArray) ToRawInfo() interface{} { + return m.Value +} + +// ToRawInfo returns a description of Strings suitable for JSON or YAML export. +func (m *Strings) ToRawInfo() interface{} { + info := yaml.MapSlice{} + // &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Tag suitable for JSON or YAML export. +func (m *Tag) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.ExternalDocs != nil { + info = append(info, yaml.MapItem{"externalDocs", m.ExternalDocs.ToRawInfo()}) + } + // &{Name:externalDocs Type:ExternalDocs StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Xml suitable for JSON or YAML export. 
+func (m *Xml) ToRawInfo() interface{} {
+  info := yaml.MapSlice{}
+  if m.Name != "" {
+    info = append(info, yaml.MapItem{"name", m.Name})
+  }
+  if m.Namespace != "" {
+    info = append(info, yaml.MapItem{"namespace", m.Namespace})
+  }
+  if m.Prefix != "" {
+    info = append(info, yaml.MapItem{"prefix", m.Prefix})
+  }
+  if m.Attribute != false {
+    info = append(info, yaml.MapItem{"attribute", m.Attribute})
+  }
+  if m.Wrapped != false {
+    info = append(info, yaml.MapItem{"wrapped", m.Wrapped})
+  }
+  if m.SpecificationExtension != nil {
+    for _, item := range m.SpecificationExtension {
+      info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})
+    }
+  }
+  // &{Name:SpecificationExtension Type:NamedAny StringEnumValues:[] MapType:Any Repeated:true Pattern:^x- Implicit:true Description:}
+  return info
+}
+
+var (
+  pattern0 = regexp.MustCompile("^")
+  pattern1 = regexp.MustCompile("^x-")
+  pattern2 = regexp.MustCompile("^/")
+  pattern3 = regexp.MustCompile("^([0-9X]{3})$")
+  pattern4 = regexp.MustCompile("^[a-zA-Z0-9\\.\\-_]+$")
+)
diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.pb.go b/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.pb.go
new file mode 100644
index 000000000..73aeb6f78
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.pb.go
@@ -0,0 +1,4703 @@
+// Code generated by protoc-gen-go.
+// source: OpenAPIv3/OpenAPIv3.proto
+// DO NOT EDIT!
+
+/*
+Package openapi_v3 is a generated protocol buffer package.
+
+It is generated from these files:
+  OpenAPIv3/OpenAPIv3.proto
+
+It has these top-level messages:
+  AdditionalPropertiesItem
+  Any
+  AnyOrExpression
+  AnysOrExpressions
+  Callback
+  CallbackOrReference
+  CallbacksOrReferences
+  Components
+  Contact
+  DefaultType
+  Discriminator
+  Document
+  Encoding
+  Encodings
+  Example
+  ExampleOrReference
+  Examples
+  ExamplesOrReferences
+  Expression
+  ExternalDocs
+  Header
+  HeaderOrReference
+  HeadersOrReferences
+  Info
+  ItemsItem
+  License
+  Link
+  LinkOrReference
+  LinksOrReferences
+  MediaType
+  MediaTypes
+  NamedAny
+  NamedAnyOrExpression
+  NamedCallbackOrReference
+  NamedEncoding
+  NamedExampleOrReference
+  NamedHeaderOrReference
+  NamedLinkOrReference
+  NamedMediaType
+  NamedParameterOrReference
+  NamedPathItem
+  NamedRequestBodyOrReference
+  NamedResponseOrReference
+  NamedSchemaOrReference
+  NamedSecuritySchemeOrReference
+  NamedServerVariable
+  NamedString
+  OauthFlow
+  OauthFlows
+  Object
+  Operation
+  Parameter
+  ParameterOrReference
+  ParametersOrReferences
+  PathItem
+  Paths
+  Properties
+  Reference
+  RequestBodiesOrReferences
+  RequestBody
+  RequestBodyOrReference
+  Response
+  ResponseOrReference
+  Responses
+  ResponsesOrReferences
+  Schema
+  SchemaOrReference
+  SchemasOrReferences
+  SecurityRequirement
+  SecurityScheme
+  SecuritySchemeOrReference
+  SecuritySchemesOrReferences
+  Server
+  ServerVariable
+  ServerVariables
+  SpecificationExtension
+  StringArray
+  Strings
+  Tag
+  Xml
+*/
+package openapi_v3
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import google_protobuf "github.com/golang/protobuf/ptypes/any"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type AdditionalPropertiesItem struct {
+  // Types that are valid to be assigned to Oneof:
+  //  *AdditionalPropertiesItem_SchemaOrReference
+  //  *AdditionalPropertiesItem_Boolean
+  Oneof isAdditionalPropertiesItem_Oneof `protobuf_oneof:"oneof"`
+}
+
+func (m *AdditionalPropertiesItem) Reset() { *m = AdditionalPropertiesItem{} }
+func (m *AdditionalPropertiesItem) String() string { return proto.CompactTextString(m) }
+func (*AdditionalPropertiesItem) ProtoMessage() {}
+func (*AdditionalPropertiesItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+type isAdditionalPropertiesItem_Oneof interface {
+  isAdditionalPropertiesItem_Oneof()
+}
+
+type AdditionalPropertiesItem_SchemaOrReference struct {
+  SchemaOrReference *SchemaOrReference `protobuf:"bytes,1,opt,name=schema_or_reference,json=schemaOrReference,oneof"`
+}
+type AdditionalPropertiesItem_Boolean struct {
+  Boolean bool `protobuf:"varint,2,opt,name=boolean,oneof"`
+}
+
+func (*AdditionalPropertiesItem_SchemaOrReference) isAdditionalPropertiesItem_Oneof() {}
+func (*AdditionalPropertiesItem_Boolean) isAdditionalPropertiesItem_Oneof() {}
+
+func (m *AdditionalPropertiesItem) GetOneof() isAdditionalPropertiesItem_Oneof {
+  if m != nil {
+    return m.Oneof
+  }
+  return nil
+}
+
+func (m *AdditionalPropertiesItem) GetSchemaOrReference() *SchemaOrReference {
+  if x, ok := m.GetOneof().(*AdditionalPropertiesItem_SchemaOrReference); ok {
+    return x.SchemaOrReference
+  }
+  return nil
+}
+
+func (m *AdditionalPropertiesItem) GetBoolean() bool {
+  if x, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok {
+    return x.Boolean
+  }
+  return false
+}
+
+// XXX_OneofFuncs is for the internal use of the proto package.
+func (*AdditionalPropertiesItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+  return _AdditionalPropertiesItem_OneofMarshaler, _AdditionalPropertiesItem_OneofUnmarshaler, _AdditionalPropertiesItem_OneofSizer, []interface{}{
+    (*AdditionalPropertiesItem_SchemaOrReference)(nil),
+    (*AdditionalPropertiesItem_Boolean)(nil),
+  }
+}
+
+func _AdditionalPropertiesItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+  m := msg.(*AdditionalPropertiesItem)
+  // oneof
+  switch x := m.Oneof.(type) {
+  case *AdditionalPropertiesItem_SchemaOrReference:
+    b.EncodeVarint(1<<3 | proto.WireBytes)
+    if err := b.EncodeMessage(x.SchemaOrReference); err != nil {
+      return err
+    }
+  case *AdditionalPropertiesItem_Boolean:
+    t := uint64(0)
+    if x.Boolean {
+      t = 1
+    }
+    b.EncodeVarint(2<<3 | proto.WireVarint)
+    b.EncodeVarint(t)
+  case nil:
+  default:
+    return fmt.Errorf("AdditionalPropertiesItem.Oneof has unexpected type %T", x)
+  }
+  return nil
+}
+
+func _AdditionalPropertiesItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+  m := msg.(*AdditionalPropertiesItem)
+  switch tag {
+  case 1: // oneof.schema_or_reference
+    if wire != proto.WireBytes {
+      return true, proto.ErrInternalBadWireType
+    }
+    msg := new(SchemaOrReference)
+    err := b.DecodeMessage(msg)
+    m.Oneof = &AdditionalPropertiesItem_SchemaOrReference{msg}
+    return true, err
+  case 2: // oneof.boolean
+    if wire != proto.WireVarint {
+      return true, proto.ErrInternalBadWireType
+    }
+    x, err := b.DecodeVarint()
+    m.Oneof = &AdditionalPropertiesItem_Boolean{x != 0}
+    return true, err
+  default:
+    return false, nil
+  }
+}
+
+func _AdditionalPropertiesItem_OneofSizer(msg proto.Message) (n int) {
+  m := msg.(*AdditionalPropertiesItem)
+  // oneof
+  switch x := m.Oneof.(type) {
+  case *AdditionalPropertiesItem_SchemaOrReference:
+    s := proto.Size(x.SchemaOrReference)
+    n += proto.SizeVarint(1<<3 | proto.WireBytes)
+    n += proto.SizeVarint(uint64(s))
+    n += s
+  case *AdditionalPropertiesItem_Boolean:
+    n += proto.SizeVarint(2<<3 | proto.WireVarint)
+    n += 1
+  case nil:
+  default:
+    panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+  }
+  return n
+}
+
+type Any struct {
+  Value *google_protobuf.Any `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"`
+  Yaml string `protobuf:"bytes,2,opt,name=yaml" json:"yaml,omitempty"`
+}
+
+func (m *Any) Reset() { *m = Any{} }
+func (m *Any) String() string { return proto.CompactTextString(m) }
+func (*Any) ProtoMessage() {}
+func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
+
+func (m *Any) GetValue() *google_protobuf.Any {
+  if m != nil {
+    return m.Value
+  }
+  return nil
+}
+
+func (m *Any) GetYaml() string {
+  if m != nil {
+    return m.Yaml
+  }
+  return ""
+}
+
+type AnyOrExpression struct {
+  // Types that are valid to be assigned to Oneof:
+  //  *AnyOrExpression_Any
+  //  *AnyOrExpression_Expression
+  Oneof isAnyOrExpression_Oneof `protobuf_oneof:"oneof"`
+}
+
+func (m *AnyOrExpression) Reset() { *m = AnyOrExpression{} }
+func (m *AnyOrExpression) String() string { return proto.CompactTextString(m) }
+func (*AnyOrExpression) ProtoMessage() {}
+func (*AnyOrExpression) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
+
+type isAnyOrExpression_Oneof interface {
+  isAnyOrExpression_Oneof()
+}
+
+type AnyOrExpression_Any struct {
+  Any *Any `protobuf:"bytes,1,opt,name=any,oneof"`
+}
+type AnyOrExpression_Expression struct { + Expression *Expression `protobuf:"bytes,2,opt,name=expression,oneof"` +} + +func (*AnyOrExpression_Any) isAnyOrExpression_Oneof() {} +func (*AnyOrExpression_Expression) isAnyOrExpression_Oneof() {} + +func (m *AnyOrExpression) GetOneof() isAnyOrExpression_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *AnyOrExpression) GetAny() *Any { + if x, ok := m.GetOneof().(*AnyOrExpression_Any); ok { + return x.Any + } + return nil +} + +func (m *AnyOrExpression) GetExpression() *Expression { + if x, ok := m.GetOneof().(*AnyOrExpression_Expression); ok { + return x.Expression + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AnyOrExpression) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AnyOrExpression_OneofMarshaler, _AnyOrExpression_OneofUnmarshaler, _AnyOrExpression_OneofSizer, []interface{}{ + (*AnyOrExpression_Any)(nil), + (*AnyOrExpression_Expression)(nil), + } +} + +func _AnyOrExpression_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AnyOrExpression) + // oneof + switch x := m.Oneof.(type) { + case *AnyOrExpression_Any: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Any); err != nil { + return err + } + case *AnyOrExpression_Expression: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Expression); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AnyOrExpression.Oneof has unexpected type %T", x) + } + return nil +} + +func _AnyOrExpression_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AnyOrExpression) + switch tag { + case 1: // oneof.any + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Any) + err := b.DecodeMessage(msg) + m.Oneof = &AnyOrExpression_Any{msg} + return true, err + case 2: // oneof.expression + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expression) + err := b.DecodeMessage(msg) + m.Oneof = &AnyOrExpression_Expression{msg} + return true, err + default: + return false, nil + } +} + +func _AnyOrExpression_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AnyOrExpression) + // oneof + switch x := m.Oneof.(type) { + case *AnyOrExpression_Any: + s := proto.Size(x.Any) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *AnyOrExpression_Expression: + s := proto.Size(x.Expression) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type AnysOrExpressions struct { + AdditionalProperties []*NamedAnyOrExpression `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *AnysOrExpressions) Reset() { *m = AnysOrExpressions{} } +func (m *AnysOrExpressions) String() string { return proto.CompactTextString(m) } +func (*AnysOrExpressions) ProtoMessage() {} +func (*AnysOrExpressions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *AnysOrExpressions) GetAdditionalProperties() []*NamedAnyOrExpression { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// A map of possible out-of band callbacks 
related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation. +type Callback struct { + Path []*NamedPathItem `protobuf:"bytes,1,rep,name=path" json:"path,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,2,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Callback) Reset() { *m = Callback{} } +func (m *Callback) String() string { return proto.CompactTextString(m) } +func (*Callback) ProtoMessage() {} +func (*Callback) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *Callback) GetPath() []*NamedPathItem { + if m != nil { + return m.Path + } + return nil +} + +func (m *Callback) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type CallbackOrReference struct { + // Types that are valid to be assigned to Oneof: + // *CallbackOrReference_Callback + // *CallbackOrReference_Reference + Oneof isCallbackOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *CallbackOrReference) Reset() { *m = CallbackOrReference{} } +func (m *CallbackOrReference) String() string { return proto.CompactTextString(m) } +func (*CallbackOrReference) ProtoMessage() {} +func (*CallbackOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +type isCallbackOrReference_Oneof interface { + isCallbackOrReference_Oneof() +} + +type CallbackOrReference_Callback struct { + Callback *Callback `protobuf:"bytes,1,opt,name=callback,oneof"` +} +type CallbackOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*CallbackOrReference_Callback) isCallbackOrReference_Oneof() {} +func (*CallbackOrReference_Reference) isCallbackOrReference_Oneof() {} + +func (m *CallbackOrReference) GetOneof() isCallbackOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *CallbackOrReference) GetCallback() *Callback { + if x, ok := m.GetOneof().(*CallbackOrReference_Callback); ok { + return x.Callback + } + return nil +} + +func (m *CallbackOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*CallbackOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CallbackOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CallbackOrReference_OneofMarshaler, _CallbackOrReference_OneofUnmarshaler, _CallbackOrReference_OneofSizer, []interface{}{ + (*CallbackOrReference_Callback)(nil), + (*CallbackOrReference_Reference)(nil), + } +} + +func _CallbackOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CallbackOrReference) + // oneof + switch x := m.Oneof.(type) { + case *CallbackOrReference_Callback: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Callback); err != nil { + return err + } + case *CallbackOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CallbackOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _CallbackOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CallbackOrReference) + switch tag { + case 1: // oneof.callback + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Callback) + err := b.DecodeMessage(msg) + m.Oneof = &CallbackOrReference_Callback{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &CallbackOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _CallbackOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CallbackOrReference) + // oneof + switch x := m.Oneof.(type) { + case *CallbackOrReference_Callback: + s := proto.Size(x.Callback) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *CallbackOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type CallbacksOrReferences struct { + AdditionalProperties []*NamedCallbackOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *CallbacksOrReferences) Reset() { *m = CallbacksOrReferences{} } +func (m *CallbacksOrReferences) String() string { return proto.CompactTextString(m) } +func (*CallbacksOrReferences) ProtoMessage() {} +func (*CallbacksOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *CallbacksOrReferences) GetAdditionalProperties() []*NamedCallbackOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object. 
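+// Illustrative note (editorial, not emitted by protoc-gen-go): each Components field is a
+// map-like wrapper keyed by name, and entries declared here become addressable from the rest
+// of the document through "#/components/..." references (e.g. a schema named "Pet" via
+// "#/components/schemas/Pet"). A minimal sketch using only types defined in this file, with
+// illustrative values:
+//
+//	components := &Components{
+//		Examples: &ExamplesOrReferences{
+//			AdditionalProperties: []*NamedExampleOrReference{
+//				{Name: "cat", Value: &ExampleOrReference{
+//					Oneof: &ExampleOrReference_Example{&Example{Summary: "A cat"}},
+//				}},
+//			},
+//		},
+//	}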
+type Components struct { + Schemas *SchemasOrReferences `protobuf:"bytes,1,opt,name=schemas" json:"schemas,omitempty"` + Responses *ResponsesOrReferences `protobuf:"bytes,2,opt,name=responses" json:"responses,omitempty"` + Parameters *ParametersOrReferences `protobuf:"bytes,3,opt,name=parameters" json:"parameters,omitempty"` + Examples *ExamplesOrReferences `protobuf:"bytes,4,opt,name=examples" json:"examples,omitempty"` + RequestBodies *RequestBodiesOrReferences `protobuf:"bytes,5,opt,name=request_bodies,json=requestBodies" json:"request_bodies,omitempty"` + Headers *HeadersOrReferences `protobuf:"bytes,6,opt,name=headers" json:"headers,omitempty"` + SecuritySchemes *SecuritySchemesOrReferences `protobuf:"bytes,7,opt,name=security_schemes,json=securitySchemes" json:"security_schemes,omitempty"` + Links *LinksOrReferences `protobuf:"bytes,8,opt,name=links" json:"links,omitempty"` + Callbacks *CallbacksOrReferences `protobuf:"bytes,9,opt,name=callbacks" json:"callbacks,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,10,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Components) Reset() { *m = Components{} } +func (m *Components) String() string { return proto.CompactTextString(m) } +func (*Components) ProtoMessage() {} +func (*Components) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *Components) GetSchemas() *SchemasOrReferences { + if m != nil { + return m.Schemas + } + return nil +} + +func (m *Components) GetResponses() *ResponsesOrReferences { + if m != nil { + return m.Responses + } + return nil +} + +func (m *Components) GetParameters() *ParametersOrReferences { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Components) GetExamples() *ExamplesOrReferences { + if m != nil { + return m.Examples + } + return nil +} + +func (m *Components) GetRequestBodies() *RequestBodiesOrReferences { + if m != nil { + return m.RequestBodies + } + return nil +} + +func (m *Components) GetHeaders() *HeadersOrReferences { + if m != nil { + return m.Headers + } + return nil +} + +func (m *Components) GetSecuritySchemes() *SecuritySchemesOrReferences { + if m != nil { + return m.SecuritySchemes + } + return nil +} + +func (m *Components) GetLinks() *LinksOrReferences { + if m != nil { + return m.Links + } + return nil +} + +func (m *Components) GetCallbacks() *CallbacksOrReferences { + if m != nil { + return m.Callbacks + } + return nil +} + +func (m *Components) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// Contact information for the exposed API. 
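+// Illustrative note (editorial): Contact is plain data and is typically populated directly
+// through the generated fields, with illustrative values such as
+//
+//	contact := &Contact{Name: "API Support", Url: "https://www.example.com/support", Email: "support@example.com"}
+//
+// and read back through the nil-safe GetName/GetUrl/GetEmail accessors below.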
+type Contact struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + Email string `protobuf:"bytes,3,opt,name=email" json:"email,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,4,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Contact) Reset() { *m = Contact{} } +func (m *Contact) String() string { return proto.CompactTextString(m) } +func (*Contact) ProtoMessage() {} +func (*Contact) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *Contact) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Contact) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Contact) GetEmail() string { + if m != nil { + return m.Email + } + return "" +} + +func (m *Contact) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type DefaultType struct { + // Types that are valid to be assigned to Oneof: + // *DefaultType_Number + // *DefaultType_Boolean + // *DefaultType_String_ + Oneof isDefaultType_Oneof `protobuf_oneof:"oneof"` +} + +func (m *DefaultType) Reset() { *m = DefaultType{} } +func (m *DefaultType) String() string { return proto.CompactTextString(m) } +func (*DefaultType) ProtoMessage() {} +func (*DefaultType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +type isDefaultType_Oneof interface { + isDefaultType_Oneof() +} + +type DefaultType_Number struct { + Number float64 `protobuf:"fixed64,1,opt,name=number,oneof"` +} +type DefaultType_Boolean struct { + Boolean bool `protobuf:"varint,2,opt,name=boolean,oneof"` +} +type DefaultType_String_ struct { + String_ string `protobuf:"bytes,3,opt,name=string,oneof"` +} + +func (*DefaultType_Number) isDefaultType_Oneof() {} +func (*DefaultType_Boolean) isDefaultType_Oneof() {} +func (*DefaultType_String_) isDefaultType_Oneof() {} + +func (m *DefaultType) GetOneof() isDefaultType_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *DefaultType) GetNumber() float64 { + if x, ok := m.GetOneof().(*DefaultType_Number); ok { + return x.Number + } + return 0 +} + +func (m *DefaultType) GetBoolean() bool { + if x, ok := m.GetOneof().(*DefaultType_Boolean); ok { + return x.Boolean + } + return false +} + +func (m *DefaultType) GetString_() string { + if x, ok := m.GetOneof().(*DefaultType_String_); ok { + return x.String_ + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DefaultType) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DefaultType_OneofMarshaler, _DefaultType_OneofUnmarshaler, _DefaultType_OneofSizer, []interface{}{ + (*DefaultType_Number)(nil), + (*DefaultType_Boolean)(nil), + (*DefaultType_String_)(nil), + } +} + +func _DefaultType_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DefaultType) + // oneof + switch x := m.Oneof.(type) { + case *DefaultType_Number: + b.EncodeVarint(1<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.Number)) + case *DefaultType_Boolean: + t := uint64(0) + if x.Boolean { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *DefaultType_String_: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.String_) + case nil: + default: + return fmt.Errorf("DefaultType.Oneof has unexpected type %T", x) + } + return nil +} + +func _DefaultType_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DefaultType) + switch tag { + case 1: // oneof.number + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Oneof = &DefaultType_Number{math.Float64frombits(x)} + return true, err + case 2: // oneof.boolean + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Oneof = &DefaultType_Boolean{x != 0} + return true, err + case 3: // oneof.string + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Oneof = &DefaultType_String_{x} + return true, err + default: + return false, nil + } +} + +func _DefaultType_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DefaultType) + // oneof + switch x := m.Oneof.(type) { + case *DefaultType_Number: + n += proto.SizeVarint(1<<3 | proto.WireFixed64) + n += 8 + case *DefaultType_Boolean: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += 1 + case *DefaultType_String_: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.String_))) + n += len(x.String_) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered. 
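+// Illustrative note (editorial): PropertyName names the payload property whose value selects
+// the schema, and Mapping can pin specific property values to schema names or references.
+// Borrowing the canonical example from the OpenAPI specification:
+//
+//	d := &Discriminator{PropertyName: "petType"}
+//
+// so that a payload whose "petType" value is "dog" is validated against the "dog" schema.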
+type Discriminator struct { + PropertyName string `protobuf:"bytes,1,opt,name=property_name,json=propertyName" json:"property_name,omitempty"` + Mapping *Strings `protobuf:"bytes,2,opt,name=mapping" json:"mapping,omitempty"` +} + +func (m *Discriminator) Reset() { *m = Discriminator{} } +func (m *Discriminator) String() string { return proto.CompactTextString(m) } +func (*Discriminator) ProtoMessage() {} +func (*Discriminator) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *Discriminator) GetPropertyName() string { + if m != nil { + return m.PropertyName + } + return "" +} + +func (m *Discriminator) GetMapping() *Strings { + if m != nil { + return m.Mapping + } + return nil +} + +type Document struct { + Openapi string `protobuf:"bytes,1,opt,name=openapi" json:"openapi,omitempty"` + Info *Info `protobuf:"bytes,2,opt,name=info" json:"info,omitempty"` + Servers []*Server `protobuf:"bytes,3,rep,name=servers" json:"servers,omitempty"` + Paths *Paths `protobuf:"bytes,4,opt,name=paths" json:"paths,omitempty"` + Components *Components `protobuf:"bytes,5,opt,name=components" json:"components,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,6,rep,name=security" json:"security,omitempty"` + Tags []*Tag `protobuf:"bytes,7,rep,name=tags" json:"tags,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,8,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,9,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +func (m *Document) GetOpenapi() string { + if m != nil { + return m.Openapi + } + return "" +} + +func (m *Document) GetInfo() *Info { + if m != nil { + return m.Info + } + return nil +} + +func (m *Document) GetServers() []*Server { + if m != nil { + return m.Servers + } + return nil +} + +func (m *Document) GetPaths() *Paths { + if m != nil { + return m.Paths + } + return nil +} + +func (m *Document) GetComponents() *Components { + if m != nil { + return m.Components + } + return nil +} + +func (m *Document) GetSecurity() []*SecurityRequirement { + if m != nil { + return m.Security + } + return nil +} + +func (m *Document) GetTags() []*Tag { + if m != nil { + return m.Tags + } + return nil +} + +func (m *Document) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Document) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// A single encoding definition applied to a single schema property. 
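+// Illustrative note (editorial): an Encoding customizes how a single multipart or
+// form-urlencoded property is serialized, for example
+//
+//	enc := &Encoding{ContentType: "image/png, image/jpeg", Explode: true}
+//
+// with illustrative values; Headers, Style and AllowReserved follow the same conventions as
+// the corresponding Parameter Object fields.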
+type Encoding struct { + ContentType string `protobuf:"bytes,1,opt,name=content_type,json=contentType" json:"content_type,omitempty"` + Headers *HeadersOrReferences `protobuf:"bytes,2,opt,name=headers" json:"headers,omitempty"` + Style string `protobuf:"bytes,3,opt,name=style" json:"style,omitempty"` + Explode bool `protobuf:"varint,4,opt,name=explode" json:"explode,omitempty"` + AllowReserved bool `protobuf:"varint,5,opt,name=allow_reserved,json=allowReserved" json:"allow_reserved,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,6,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Encoding) Reset() { *m = Encoding{} } +func (m *Encoding) String() string { return proto.CompactTextString(m) } +func (*Encoding) ProtoMessage() {} +func (*Encoding) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +func (m *Encoding) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *Encoding) GetHeaders() *HeadersOrReferences { + if m != nil { + return m.Headers + } + return nil +} + +func (m *Encoding) GetStyle() string { + if m != nil { + return m.Style + } + return "" +} + +func (m *Encoding) GetExplode() bool { + if m != nil { + return m.Explode + } + return false +} + +func (m *Encoding) GetAllowReserved() bool { + if m != nil { + return m.AllowReserved + } + return false +} + +func (m *Encoding) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type Encodings struct { + AdditionalProperties []*NamedEncoding `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Encodings) Reset() { *m = Encodings{} } +func (m *Encodings) String() string { return proto.CompactTextString(m) } +func (*Encodings) ProtoMessage() {} +func (*Encodings) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +func (m *Encodings) GetAdditionalProperties() []*NamedEncoding { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Example struct { + Summary string `protobuf:"bytes,1,opt,name=summary" json:"summary,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + Value *Any `protobuf:"bytes,3,opt,name=value" json:"value,omitempty"` + ExternalValue string `protobuf:"bytes,4,opt,name=external_value,json=externalValue" json:"external_value,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,5,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Example) Reset() { *m = Example{} } +func (m *Example) String() string { return proto.CompactTextString(m) } +func (*Example) ProtoMessage() {} +func (*Example) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +func (m *Example) GetSummary() string { + if m != nil { + return m.Summary + } + return "" +} + +func (m *Example) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Example) GetValue() *Any { + if m != nil { + return m.Value + } + return nil +} + +func (m *Example) GetExternalValue() string { + if m != nil { + return m.ExternalValue + } + return "" +} + +func (m *Example) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type ExampleOrReference struct { + // Types that are valid to be assigned to Oneof: + 
// *ExampleOrReference_Example + // *ExampleOrReference_Reference + Oneof isExampleOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *ExampleOrReference) Reset() { *m = ExampleOrReference{} } +func (m *ExampleOrReference) String() string { return proto.CompactTextString(m) } +func (*ExampleOrReference) ProtoMessage() {} +func (*ExampleOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +type isExampleOrReference_Oneof interface { + isExampleOrReference_Oneof() +} + +type ExampleOrReference_Example struct { + Example *Example `protobuf:"bytes,1,opt,name=example,oneof"` +} +type ExampleOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*ExampleOrReference_Example) isExampleOrReference_Oneof() {} +func (*ExampleOrReference_Reference) isExampleOrReference_Oneof() {} + +func (m *ExampleOrReference) GetOneof() isExampleOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *ExampleOrReference) GetExample() *Example { + if x, ok := m.GetOneof().(*ExampleOrReference_Example); ok { + return x.Example + } + return nil +} + +func (m *ExampleOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*ExampleOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ExampleOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExampleOrReference_OneofMarshaler, _ExampleOrReference_OneofUnmarshaler, _ExampleOrReference_OneofSizer, []interface{}{ + (*ExampleOrReference_Example)(nil), + (*ExampleOrReference_Reference)(nil), + } +} + +func _ExampleOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExampleOrReference) + // oneof + switch x := m.Oneof.(type) { + case *ExampleOrReference_Example: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Example); err != nil { + return err + } + case *ExampleOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExampleOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _ExampleOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExampleOrReference) + switch tag { + case 1: // oneof.example + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Example) + err := b.DecodeMessage(msg) + m.Oneof = &ExampleOrReference_Example{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &ExampleOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _ExampleOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExampleOrReference) + // oneof + switch x := m.Oneof.(type) { + case *ExampleOrReference_Example: + s := proto.Size(x.Example) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *ExampleOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: 
unexpected type %T in oneof", x)) + } + return n +} + +type Examples struct { +} + +func (m *Examples) Reset() { *m = Examples{} } +func (m *Examples) String() string { return proto.CompactTextString(m) } +func (*Examples) ProtoMessage() {} +func (*Examples) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +type ExamplesOrReferences struct { + AdditionalProperties []*NamedExampleOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *ExamplesOrReferences) Reset() { *m = ExamplesOrReferences{} } +func (m *ExamplesOrReferences) String() string { return proto.CompactTextString(m) } +func (*ExamplesOrReferences) ProtoMessage() {} +func (*ExamplesOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +func (m *ExamplesOrReferences) GetAdditionalProperties() []*NamedExampleOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Expression struct { + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Expression) Reset() { *m = Expression{} } +func (m *Expression) String() string { return proto.CompactTextString(m) } +func (*Expression) ProtoMessage() {} +func (*Expression) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +func (m *Expression) GetAdditionalProperties() []*NamedAny { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Allows referencing an external resource for extended documentation. +type ExternalDocs struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,3,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *ExternalDocs) Reset() { *m = ExternalDocs{} } +func (m *ExternalDocs) String() string { return proto.CompactTextString(m) } +func (*ExternalDocs) ProtoMessage() {} +func (*ExternalDocs) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *ExternalDocs) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ExternalDocs) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *ExternalDocs) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`). 
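+// Illustrative note (editorial): because the name and location are implied by the enclosing
+// headers map, a header is described only by its remaining traits, e.g.
+//
+//	h := &Header{Description: "The number of allowed requests in the current period", Required: true}
+//
+// with illustrative values; as with Parameter Objects, Schema/Content and Example/Examples
+// are intended as mutually exclusive pairs.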
+type Header struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Required bool `protobuf:"varint,2,opt,name=required" json:"required,omitempty"` + Deprecated bool `protobuf:"varint,3,opt,name=deprecated" json:"deprecated,omitempty"` + AllowEmptyValue bool `protobuf:"varint,4,opt,name=allow_empty_value,json=allowEmptyValue" json:"allow_empty_value,omitempty"` + Style string `protobuf:"bytes,5,opt,name=style" json:"style,omitempty"` + Explode bool `protobuf:"varint,6,opt,name=explode" json:"explode,omitempty"` + AllowReserved bool `protobuf:"varint,7,opt,name=allow_reserved,json=allowReserved" json:"allow_reserved,omitempty"` + Schema *SchemaOrReference `protobuf:"bytes,8,opt,name=schema" json:"schema,omitempty"` + Example *Any `protobuf:"bytes,9,opt,name=example" json:"example,omitempty"` + Examples *ExamplesOrReferences `protobuf:"bytes,10,opt,name=examples" json:"examples,omitempty"` + Content *MediaTypes `protobuf:"bytes,11,opt,name=content" json:"content,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,12,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Header) Reset() { *m = Header{} } +func (m *Header) String() string { return proto.CompactTextString(m) } +func (*Header) ProtoMessage() {} +func (*Header) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *Header) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Header) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *Header) GetDeprecated() bool { + if m != nil { + return m.Deprecated + } + return false +} + +func (m *Header) GetAllowEmptyValue() bool { + if m != nil { + return m.AllowEmptyValue + } + return false +} + +func (m *Header) GetStyle() string { + if m != nil { + return m.Style + } + return "" +} + +func (m *Header) GetExplode() bool { + if m != nil { + return m.Explode + } + return false +} + +func (m *Header) GetAllowReserved() bool { + if m != nil { + return m.AllowReserved + } + return false +} + +func (m *Header) GetSchema() *SchemaOrReference { + if m != nil { + return m.Schema + } + return nil +} + +func (m *Header) GetExample() *Any { + if m != nil { + return m.Example + } + return nil +} + +func (m *Header) GetExamples() *ExamplesOrReferences { + if m != nil { + return m.Examples + } + return nil +} + +func (m *Header) GetContent() *MediaTypes { + if m != nil { + return m.Content + } + return nil +} + +func (m *Header) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type HeaderOrReference struct { + // Types that are valid to be assigned to Oneof: + // *HeaderOrReference_Header + // *HeaderOrReference_Reference + Oneof isHeaderOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *HeaderOrReference) Reset() { *m = HeaderOrReference{} } +func (m *HeaderOrReference) String() string { return proto.CompactTextString(m) } +func (*HeaderOrReference) ProtoMessage() {} +func (*HeaderOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } + +type isHeaderOrReference_Oneof interface { + isHeaderOrReference_Oneof() +} + +type HeaderOrReference_Header struct { + Header *Header `protobuf:"bytes,1,opt,name=header,oneof"` +} +type HeaderOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*HeaderOrReference_Header) 
isHeaderOrReference_Oneof() {} +func (*HeaderOrReference_Reference) isHeaderOrReference_Oneof() {} + +func (m *HeaderOrReference) GetOneof() isHeaderOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *HeaderOrReference) GetHeader() *Header { + if x, ok := m.GetOneof().(*HeaderOrReference_Header); ok { + return x.Header + } + return nil +} + +func (m *HeaderOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*HeaderOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*HeaderOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HeaderOrReference_OneofMarshaler, _HeaderOrReference_OneofUnmarshaler, _HeaderOrReference_OneofSizer, []interface{}{ + (*HeaderOrReference_Header)(nil), + (*HeaderOrReference_Reference)(nil), + } +} + +func _HeaderOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HeaderOrReference) + // oneof + switch x := m.Oneof.(type) { + case *HeaderOrReference_Header: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Header); err != nil { + return err + } + case *HeaderOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HeaderOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _HeaderOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HeaderOrReference) + switch tag { + case 1: // oneof.header + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Header) + err := b.DecodeMessage(msg) + m.Oneof = &HeaderOrReference_Header{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &HeaderOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _HeaderOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HeaderOrReference) + // oneof + switch x := m.Oneof.(type) { + case *HeaderOrReference_Header: + s := proto.Size(x.Header) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *HeaderOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type HeadersOrReferences struct { + AdditionalProperties []*NamedHeaderOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *HeadersOrReferences) Reset() { *m = HeadersOrReferences{} } +func (m *HeadersOrReferences) String() string { return proto.CompactTextString(m) } +func (*HeadersOrReferences) ProtoMessage() {} +func (*HeadersOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } + +func (m *HeadersOrReferences) GetAdditionalProperties() []*NamedHeaderOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// The object provides metadata about the API. 
The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience. +type Info struct { + Title string `protobuf:"bytes,1,opt,name=title" json:"title,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + TermsOfService string `protobuf:"bytes,3,opt,name=terms_of_service,json=termsOfService" json:"terms_of_service,omitempty"` + Contact *Contact `protobuf:"bytes,4,opt,name=contact" json:"contact,omitempty"` + License *License `protobuf:"bytes,5,opt,name=license" json:"license,omitempty"` + Version string `protobuf:"bytes,6,opt,name=version" json:"version,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,7,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Info) Reset() { *m = Info{} } +func (m *Info) String() string { return proto.CompactTextString(m) } +func (*Info) ProtoMessage() {} +func (*Info) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } + +func (m *Info) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Info) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Info) GetTermsOfService() string { + if m != nil { + return m.TermsOfService + } + return "" +} + +func (m *Info) GetContact() *Contact { + if m != nil { + return m.Contact + } + return nil +} + +func (m *Info) GetLicense() *License { + if m != nil { + return m.License + } + return nil +} + +func (m *Info) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Info) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type ItemsItem struct { + SchemaOrReference []*SchemaOrReference `protobuf:"bytes,1,rep,name=schema_or_reference,json=schemaOrReference" json:"schema_or_reference,omitempty"` +} + +func (m *ItemsItem) Reset() { *m = ItemsItem{} } +func (m *ItemsItem) String() string { return proto.CompactTextString(m) } +func (*ItemsItem) ProtoMessage() {} +func (*ItemsItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } + +func (m *ItemsItem) GetSchemaOrReference() []*SchemaOrReference { + if m != nil { + return m.SchemaOrReference + } + return nil +} + +// License information for the exposed API. +type License struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,3,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *License) Reset() { *m = License{} } +func (m *License) String() string { return proto.CompactTextString(m) } +func (*License) ProtoMessage() {} +func (*License) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } + +func (m *License) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *License) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *License) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// The `Link object` represents a possible design-time link for a response. 
The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation. +type Link struct { + OperationRef string `protobuf:"bytes,1,opt,name=operation_ref,json=operationRef" json:"operation_ref,omitempty"` + OperationId string `protobuf:"bytes,2,opt,name=operation_id,json=operationId" json:"operation_id,omitempty"` + Parameters *AnysOrExpressions `protobuf:"bytes,3,opt,name=parameters" json:"parameters,omitempty"` + RequestBody *AnyOrExpression `protobuf:"bytes,4,opt,name=request_body,json=requestBody" json:"request_body,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description" json:"description,omitempty"` + Server *Server `protobuf:"bytes,6,opt,name=server" json:"server,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,7,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Link) Reset() { *m = Link{} } +func (m *Link) String() string { return proto.CompactTextString(m) } +func (*Link) ProtoMessage() {} +func (*Link) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } + +func (m *Link) GetOperationRef() string { + if m != nil { + return m.OperationRef + } + return "" +} + +func (m *Link) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *Link) GetParameters() *AnysOrExpressions { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Link) GetRequestBody() *AnyOrExpression { + if m != nil { + return m.RequestBody + } + return nil +} + +func (m *Link) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Link) GetServer() *Server { + if m != nil { + return m.Server + } + return nil +} + +func (m *Link) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type LinkOrReference struct { + // Types that are valid to be assigned to Oneof: + // *LinkOrReference_Link + // *LinkOrReference_Reference + Oneof isLinkOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *LinkOrReference) Reset() { *m = LinkOrReference{} } +func (m *LinkOrReference) String() string { return proto.CompactTextString(m) } +func (*LinkOrReference) ProtoMessage() {} +func (*LinkOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } + +type isLinkOrReference_Oneof interface { + isLinkOrReference_Oneof() +} + +type LinkOrReference_Link struct { + Link *Link `protobuf:"bytes,1,opt,name=link,oneof"` +} +type LinkOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*LinkOrReference_Link) isLinkOrReference_Oneof() {} +func (*LinkOrReference_Reference) isLinkOrReference_Oneof() {} + +func (m *LinkOrReference) GetOneof() isLinkOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *LinkOrReference) GetLink() *Link { + if x, ok := m.GetOneof().(*LinkOrReference_Link); ok { + return x.Link + } + return nil +} + +func (m *LinkOrReference) GetReference() *Reference { + if x, ok := 
m.GetOneof().(*LinkOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LinkOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LinkOrReference_OneofMarshaler, _LinkOrReference_OneofUnmarshaler, _LinkOrReference_OneofSizer, []interface{}{ + (*LinkOrReference_Link)(nil), + (*LinkOrReference_Reference)(nil), + } +} + +func _LinkOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LinkOrReference) + // oneof + switch x := m.Oneof.(type) { + case *LinkOrReference_Link: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Link); err != nil { + return err + } + case *LinkOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LinkOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _LinkOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LinkOrReference) + switch tag { + case 1: // oneof.link + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Link) + err := b.DecodeMessage(msg) + m.Oneof = &LinkOrReference_Link{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &LinkOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _LinkOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LinkOrReference) + // oneof + switch x := m.Oneof.(type) { + case *LinkOrReference_Link: + s := proto.Size(x.Link) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *LinkOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type LinksOrReferences struct { + AdditionalProperties []*NamedLinkOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *LinksOrReferences) Reset() { *m = LinksOrReferences{} } +func (m *LinksOrReferences) String() string { return proto.CompactTextString(m) } +func (*LinksOrReferences) ProtoMessage() {} +func (*LinksOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{28} } + +func (m *LinksOrReferences) GetAdditionalProperties() []*NamedLinkOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Each Media Type Object provides schema and examples for the media type identified by its key. 
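+// Illustrative note (editorial): the media type string itself (e.g. "application/json") is the
+// key under which a MediaType is stored in a MediaTypes map, so the message carries only the
+// schema, examples and per-property encoding, e.g.
+//
+//	mt := &MediaType{Example: &Any{Yaml: "id: 1\nname: doggie"}}
+//
+// with illustrative values.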
+type MediaType struct { + Schema *SchemaOrReference `protobuf:"bytes,1,opt,name=schema" json:"schema,omitempty"` + Example *Any `protobuf:"bytes,2,opt,name=example" json:"example,omitempty"` + Examples *ExamplesOrReferences `protobuf:"bytes,3,opt,name=examples" json:"examples,omitempty"` + Encoding *Encodings `protobuf:"bytes,4,opt,name=encoding" json:"encoding,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,5,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *MediaType) Reset() { *m = MediaType{} } +func (m *MediaType) String() string { return proto.CompactTextString(m) } +func (*MediaType) ProtoMessage() {} +func (*MediaType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{29} } + +func (m *MediaType) GetSchema() *SchemaOrReference { + if m != nil { + return m.Schema + } + return nil +} + +func (m *MediaType) GetExample() *Any { + if m != nil { + return m.Example + } + return nil +} + +func (m *MediaType) GetExamples() *ExamplesOrReferences { + if m != nil { + return m.Examples + } + return nil +} + +func (m *MediaType) GetEncoding() *Encodings { + if m != nil { + return m.Encoding + } + return nil +} + +func (m *MediaType) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type MediaTypes struct { + AdditionalProperties []*NamedMediaType `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *MediaTypes) Reset() { *m = MediaTypes{} } +func (m *MediaTypes) String() string { return proto.CompactTextString(m) } +func (*MediaTypes) ProtoMessage() {} +func (*MediaTypes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{30} } + +func (m *MediaTypes) GetAdditionalProperties() []*NamedMediaType { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +type NamedAny struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Any `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedAny) Reset() { *m = NamedAny{} } +func (m *NamedAny) String() string { return proto.CompactTextString(m) } +func (*NamedAny) ProtoMessage() {} +func (*NamedAny) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{31} } + +func (m *NamedAny) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedAny) GetValue() *Any { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of AnyOrExpression as ordered (name,value) pairs. 
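+// Illustrative note (editorial): the Named* messages below all follow the pattern of NamedAny
+// above: a plain (Name, Value) pair so that map entries keep their original document order.
+// Specification extensions, for instance, surface as a []*NamedAny whose names carry the
+// "x-" prefix, e.g.
+//
+//	ext := &NamedAny{Name: "x-logo", Value: &Any{Yaml: "url: https://example.com/logo.png"}}
+//
+// with illustrative values.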
+type NamedAnyOrExpression struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *AnyOrExpression `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedAnyOrExpression) Reset() { *m = NamedAnyOrExpression{} } +func (m *NamedAnyOrExpression) String() string { return proto.CompactTextString(m) } +func (*NamedAnyOrExpression) ProtoMessage() {} +func (*NamedAnyOrExpression) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{32} } + +func (m *NamedAnyOrExpression) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedAnyOrExpression) GetValue() *AnyOrExpression { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of CallbackOrReference as ordered (name,value) pairs. +type NamedCallbackOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *CallbackOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedCallbackOrReference) Reset() { *m = NamedCallbackOrReference{} } +func (m *NamedCallbackOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedCallbackOrReference) ProtoMessage() {} +func (*NamedCallbackOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{33} } + +func (m *NamedCallbackOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedCallbackOrReference) GetValue() *CallbackOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Encoding as ordered (name,value) pairs. +type NamedEncoding struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Encoding `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedEncoding) Reset() { *m = NamedEncoding{} } +func (m *NamedEncoding) String() string { return proto.CompactTextString(m) } +func (*NamedEncoding) ProtoMessage() {} +func (*NamedEncoding) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{34} } + +func (m *NamedEncoding) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedEncoding) GetValue() *Encoding { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of ExampleOrReference as ordered (name,value) pairs. +type NamedExampleOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *ExampleOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedExampleOrReference) Reset() { *m = NamedExampleOrReference{} } +func (m *NamedExampleOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedExampleOrReference) ProtoMessage() {} +func (*NamedExampleOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{35} } + +func (m *NamedExampleOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedExampleOrReference) GetValue() *ExampleOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of HeaderOrReference as ordered (name,value) pairs. 
+type NamedHeaderOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *HeaderOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedHeaderOrReference) Reset() { *m = NamedHeaderOrReference{} } +func (m *NamedHeaderOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedHeaderOrReference) ProtoMessage() {} +func (*NamedHeaderOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{36} } + +func (m *NamedHeaderOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedHeaderOrReference) GetValue() *HeaderOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of LinkOrReference as ordered (name,value) pairs. +type NamedLinkOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *LinkOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedLinkOrReference) Reset() { *m = NamedLinkOrReference{} } +func (m *NamedLinkOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedLinkOrReference) ProtoMessage() {} +func (*NamedLinkOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{37} } + +func (m *NamedLinkOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedLinkOrReference) GetValue() *LinkOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of MediaType as ordered (name,value) pairs. +type NamedMediaType struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *MediaType `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedMediaType) Reset() { *m = NamedMediaType{} } +func (m *NamedMediaType) String() string { return proto.CompactTextString(m) } +func (*NamedMediaType) ProtoMessage() {} +func (*NamedMediaType) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{38} } + +func (m *NamedMediaType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedMediaType) GetValue() *MediaType { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of ParameterOrReference as ordered (name,value) pairs. +type NamedParameterOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *ParameterOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedParameterOrReference) Reset() { *m = NamedParameterOrReference{} } +func (m *NamedParameterOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedParameterOrReference) ProtoMessage() {} +func (*NamedParameterOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{39} } + +func (m *NamedParameterOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedParameterOrReference) GetValue() *ParameterOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. 
+type NamedPathItem struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *PathItem `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedPathItem) Reset() { *m = NamedPathItem{} } +func (m *NamedPathItem) String() string { return proto.CompactTextString(m) } +func (*NamedPathItem) ProtoMessage() {} +func (*NamedPathItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{40} } + +func (m *NamedPathItem) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedPathItem) GetValue() *PathItem { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of RequestBodyOrReference as ordered (name,value) pairs. +type NamedRequestBodyOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *RequestBodyOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedRequestBodyOrReference) Reset() { *m = NamedRequestBodyOrReference{} } +func (m *NamedRequestBodyOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedRequestBodyOrReference) ProtoMessage() {} +func (*NamedRequestBodyOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{41} } + +func (m *NamedRequestBodyOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedRequestBodyOrReference) GetValue() *RequestBodyOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of ResponseOrReference as ordered (name,value) pairs. +type NamedResponseOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *ResponseOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedResponseOrReference) Reset() { *m = NamedResponseOrReference{} } +func (m *NamedResponseOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedResponseOrReference) ProtoMessage() {} +func (*NamedResponseOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{42} } + +func (m *NamedResponseOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedResponseOrReference) GetValue() *ResponseOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of SchemaOrReference as ordered (name,value) pairs. +type NamedSchemaOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *SchemaOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedSchemaOrReference) Reset() { *m = NamedSchemaOrReference{} } +func (m *NamedSchemaOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedSchemaOrReference) ProtoMessage() {} +func (*NamedSchemaOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{43} } + +func (m *NamedSchemaOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedSchemaOrReference) GetValue() *SchemaOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of SecuritySchemeOrReference as ordered (name,value) pairs. 
+type NamedSecuritySchemeOrReference struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *SecuritySchemeOrReference `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedSecuritySchemeOrReference) Reset() { *m = NamedSecuritySchemeOrReference{} } +func (m *NamedSecuritySchemeOrReference) String() string { return proto.CompactTextString(m) } +func (*NamedSecuritySchemeOrReference) ProtoMessage() {} +func (*NamedSecuritySchemeOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{44} } + +func (m *NamedSecuritySchemeOrReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedSecuritySchemeOrReference) GetValue() *SecuritySchemeOrReference { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of ServerVariable as ordered (name,value) pairs. +type NamedServerVariable struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *ServerVariable `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedServerVariable) Reset() { *m = NamedServerVariable{} } +func (m *NamedServerVariable) String() string { return proto.CompactTextString(m) } +func (*NamedServerVariable) ProtoMessage() {} +func (*NamedServerVariable) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{45} } + +func (m *NamedServerVariable) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedServerVariable) GetValue() *ServerVariable { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. 
+type NamedString struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedString) Reset() { *m = NamedString{} } +func (m *NamedString) String() string { return proto.CompactTextString(m) } +func (*NamedString) ProtoMessage() {} +func (*NamedString) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{46} } + +func (m *NamedString) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedString) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// Configuration details for a supported OAuth Flow +type OauthFlow struct { + AuthorizationUrl string `protobuf:"bytes,1,opt,name=authorization_url,json=authorizationUrl" json:"authorization_url,omitempty"` + TokenUrl string `protobuf:"bytes,2,opt,name=token_url,json=tokenUrl" json:"token_url,omitempty"` + RefreshUrl string `protobuf:"bytes,3,opt,name=refresh_url,json=refreshUrl" json:"refresh_url,omitempty"` + Scopes *Strings `protobuf:"bytes,4,opt,name=scopes" json:"scopes,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,5,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *OauthFlow) Reset() { *m = OauthFlow{} } +func (m *OauthFlow) String() string { return proto.CompactTextString(m) } +func (*OauthFlow) ProtoMessage() {} +func (*OauthFlow) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{47} } + +func (m *OauthFlow) GetAuthorizationUrl() string { + if m != nil { + return m.AuthorizationUrl + } + return "" +} + +func (m *OauthFlow) GetTokenUrl() string { + if m != nil { + return m.TokenUrl + } + return "" +} + +func (m *OauthFlow) GetRefreshUrl() string { + if m != nil { + return m.RefreshUrl + } + return "" +} + +func (m *OauthFlow) GetScopes() *Strings { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *OauthFlow) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// Allows configuration of the supported OAuth Flows. 
+type OauthFlows struct { + Implicit *OauthFlow `protobuf:"bytes,1,opt,name=implicit" json:"implicit,omitempty"` + Password *OauthFlow `protobuf:"bytes,2,opt,name=password" json:"password,omitempty"` + ClientCredentials *OauthFlow `protobuf:"bytes,3,opt,name=client_credentials,json=clientCredentials" json:"client_credentials,omitempty"` + AuthorizationCode *OauthFlow `protobuf:"bytes,4,opt,name=authorization_code,json=authorizationCode" json:"authorization_code,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,5,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *OauthFlows) Reset() { *m = OauthFlows{} } +func (m *OauthFlows) String() string { return proto.CompactTextString(m) } +func (*OauthFlows) ProtoMessage() {} +func (*OauthFlows) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{48} } + +func (m *OauthFlows) GetImplicit() *OauthFlow { + if m != nil { + return m.Implicit + } + return nil +} + +func (m *OauthFlows) GetPassword() *OauthFlow { + if m != nil { + return m.Password + } + return nil +} + +func (m *OauthFlows) GetClientCredentials() *OauthFlow { + if m != nil { + return m.ClientCredentials + } + return nil +} + +func (m *OauthFlows) GetAuthorizationCode() *OauthFlow { + if m != nil { + return m.AuthorizationCode + } + return nil +} + +func (m *OauthFlows) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type Object struct { + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Object) Reset() { *m = Object{} } +func (m *Object) String() string { return proto.CompactTextString(m) } +func (*Object) ProtoMessage() {} +func (*Object) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{49} } + +func (m *Object) GetAdditionalProperties() []*NamedAny { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Describes a single API operation on a path. 
+type Operation struct { + Tags []string `protobuf:"bytes,1,rep,name=tags" json:"tags,omitempty"` + Summary string `protobuf:"bytes,2,opt,name=summary" json:"summary,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,4,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + OperationId string `protobuf:"bytes,5,opt,name=operation_id,json=operationId" json:"operation_id,omitempty"` + Parameters []*ParameterOrReference `protobuf:"bytes,6,rep,name=parameters" json:"parameters,omitempty"` + RequestBody *RequestBodyOrReference `protobuf:"bytes,7,opt,name=request_body,json=requestBody" json:"request_body,omitempty"` + Responses *Responses `protobuf:"bytes,8,opt,name=responses" json:"responses,omitempty"` + Callbacks *CallbacksOrReferences `protobuf:"bytes,9,opt,name=callbacks" json:"callbacks,omitempty"` + Deprecated bool `protobuf:"varint,10,opt,name=deprecated" json:"deprecated,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,11,rep,name=security" json:"security,omitempty"` + Servers []*Server `protobuf:"bytes,12,rep,name=servers" json:"servers,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,13,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{50} } + +func (m *Operation) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *Operation) GetSummary() string { + if m != nil { + return m.Summary + } + return "" +} + +func (m *Operation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Operation) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Operation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *Operation) GetParameters() []*ParameterOrReference { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Operation) GetRequestBody() *RequestBodyOrReference { + if m != nil { + return m.RequestBody + } + return nil +} + +func (m *Operation) GetResponses() *Responses { + if m != nil { + return m.Responses + } + return nil +} + +func (m *Operation) GetCallbacks() *CallbacksOrReferences { + if m != nil { + return m.Callbacks + } + return nil +} + +func (m *Operation) GetDeprecated() bool { + if m != nil { + return m.Deprecated + } + return false +} + +func (m *Operation) GetSecurity() []*SecurityRequirement { + if m != nil { + return m.Security + } + return nil +} + +func (m *Operation) GetServers() []*Server { + if m != nil { + return m.Servers + } + return nil +} + +func (m *Operation) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// Describes a single operation parameter. A unique parameter is defined by a combination of a name and location. 
+type Parameter struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + In string `protobuf:"bytes,2,opt,name=in" json:"in,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + Required bool `protobuf:"varint,4,opt,name=required" json:"required,omitempty"` + Deprecated bool `protobuf:"varint,5,opt,name=deprecated" json:"deprecated,omitempty"` + AllowEmptyValue bool `protobuf:"varint,6,opt,name=allow_empty_value,json=allowEmptyValue" json:"allow_empty_value,omitempty"` + Style string `protobuf:"bytes,7,opt,name=style" json:"style,omitempty"` + Explode bool `protobuf:"varint,8,opt,name=explode" json:"explode,omitempty"` + AllowReserved bool `protobuf:"varint,9,opt,name=allow_reserved,json=allowReserved" json:"allow_reserved,omitempty"` + Schema *SchemaOrReference `protobuf:"bytes,10,opt,name=schema" json:"schema,omitempty"` + Example *Any `protobuf:"bytes,11,opt,name=example" json:"example,omitempty"` + Examples *ExamplesOrReferences `protobuf:"bytes,12,opt,name=examples" json:"examples,omitempty"` + Content *MediaTypes `protobuf:"bytes,13,opt,name=content" json:"content,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,14,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Parameter) Reset() { *m = Parameter{} } +func (m *Parameter) String() string { return proto.CompactTextString(m) } +func (*Parameter) ProtoMessage() {} +func (*Parameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{51} } + +func (m *Parameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Parameter) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *Parameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Parameter) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *Parameter) GetDeprecated() bool { + if m != nil { + return m.Deprecated + } + return false +} + +func (m *Parameter) GetAllowEmptyValue() bool { + if m != nil { + return m.AllowEmptyValue + } + return false +} + +func (m *Parameter) GetStyle() string { + if m != nil { + return m.Style + } + return "" +} + +func (m *Parameter) GetExplode() bool { + if m != nil { + return m.Explode + } + return false +} + +func (m *Parameter) GetAllowReserved() bool { + if m != nil { + return m.AllowReserved + } + return false +} + +func (m *Parameter) GetSchema() *SchemaOrReference { + if m != nil { + return m.Schema + } + return nil +} + +func (m *Parameter) GetExample() *Any { + if m != nil { + return m.Example + } + return nil +} + +func (m *Parameter) GetExamples() *ExamplesOrReferences { + if m != nil { + return m.Examples + } + return nil +} + +func (m *Parameter) GetContent() *MediaTypes { + if m != nil { + return m.Content + } + return nil +} + +func (m *Parameter) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type ParameterOrReference struct { + // Types that are valid to be assigned to Oneof: + // *ParameterOrReference_Parameter + // *ParameterOrReference_Reference + Oneof isParameterOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *ParameterOrReference) Reset() { *m = ParameterOrReference{} } +func (m *ParameterOrReference) String() string { return proto.CompactTextString(m) } +func (*ParameterOrReference) ProtoMessage() {} +func (*ParameterOrReference) Descriptor() 
([]byte, []int) { return fileDescriptor0, []int{52} } + +type isParameterOrReference_Oneof interface { + isParameterOrReference_Oneof() +} + +type ParameterOrReference_Parameter struct { + Parameter *Parameter `protobuf:"bytes,1,opt,name=parameter,oneof"` +} +type ParameterOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*ParameterOrReference_Parameter) isParameterOrReference_Oneof() {} +func (*ParameterOrReference_Reference) isParameterOrReference_Oneof() {} + +func (m *ParameterOrReference) GetOneof() isParameterOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *ParameterOrReference) GetParameter() *Parameter { + if x, ok := m.GetOneof().(*ParameterOrReference_Parameter); ok { + return x.Parameter + } + return nil +} + +func (m *ParameterOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*ParameterOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ParameterOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ParameterOrReference_OneofMarshaler, _ParameterOrReference_OneofUnmarshaler, _ParameterOrReference_OneofSizer, []interface{}{ + (*ParameterOrReference_Parameter)(nil), + (*ParameterOrReference_Reference)(nil), + } +} + +func _ParameterOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ParameterOrReference) + // oneof + switch x := m.Oneof.(type) { + case *ParameterOrReference_Parameter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Parameter); err != nil { + return err + } + case *ParameterOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ParameterOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _ParameterOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ParameterOrReference) + switch tag { + case 1: // oneof.parameter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Parameter) + err := b.DecodeMessage(msg) + m.Oneof = &ParameterOrReference_Parameter{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &ParameterOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _ParameterOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ParameterOrReference) + // oneof + switch x := m.Oneof.(type) { + case *ParameterOrReference_Parameter: + s := proto.Size(x.Parameter) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *ParameterOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type ParametersOrReferences struct { + AdditionalProperties []*NamedParameterOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + 
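The oneof wrapper types generated above are used by storing one of the variant structs in the exported Oneof field and reading it back through the typed getters, which return nil when the other variant is held. Below is a minimal usage sketch, not part of the vendored file; it assumes the generated package is imported under the alias openapi_v3, and the import path shown is only a placeholder for wherever this file ends up being vendored.

package main

import (
	"fmt"

	// Hypothetical import path; substitute wherever this generated package is vendored.
	openapi_v3 "example.com/placeholder/openapi_v3"
)

func main() {
	// Store the Parameter variant in the oneof container.
	por := &openapi_v3.ParameterOrReference{
		Oneof: &openapi_v3.ParameterOrReference_Parameter{
			Parameter: &openapi_v3.Parameter{
				Name:     "petId", // example values, not taken from the change
				In:       "path",
				Required: true,
			},
		},
	}

	// Typed getters are variant-aware: GetParameter yields the value only when
	// the parameter variant is set; GetReference returns nil in that case.
	if p := por.GetParameter(); p != nil {
		fmt.Println("inline parameter:", p.GetName())
	}
	if r := por.GetReference(); r != nil {
		fmt.Println("$ref:", r.GetXRef())
	}
}

The same pattern applies to the other *OrReference wrappers in this file (RequestBodyOrReference, ResponseOrReference, SchemaOrReference, and so on).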
+func (m *ParametersOrReferences) Reset() { *m = ParametersOrReferences{} } +func (m *ParametersOrReferences) String() string { return proto.CompactTextString(m) } +func (*ParametersOrReferences) ProtoMessage() {} +func (*ParametersOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{53} } + +func (m *ParametersOrReferences) GetAdditionalProperties() []*NamedParameterOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available. +type PathItem struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + Summary string `protobuf:"bytes,2,opt,name=summary" json:"summary,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + Get *Operation `protobuf:"bytes,4,opt,name=get" json:"get,omitempty"` + Put *Operation `protobuf:"bytes,5,opt,name=put" json:"put,omitempty"` + Post *Operation `protobuf:"bytes,6,opt,name=post" json:"post,omitempty"` + Delete *Operation `protobuf:"bytes,7,opt,name=delete" json:"delete,omitempty"` + Options *Operation `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + Head *Operation `protobuf:"bytes,9,opt,name=head" json:"head,omitempty"` + Patch *Operation `protobuf:"bytes,10,opt,name=patch" json:"patch,omitempty"` + Trace *Operation `protobuf:"bytes,11,opt,name=trace" json:"trace,omitempty"` + Servers []*Server `protobuf:"bytes,12,rep,name=servers" json:"servers,omitempty"` + Parameters []*ParameterOrReference `protobuf:"bytes,13,rep,name=parameters" json:"parameters,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,14,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *PathItem) Reset() { *m = PathItem{} } +func (m *PathItem) String() string { return proto.CompactTextString(m) } +func (*PathItem) ProtoMessage() {} +func (*PathItem) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{54} } + +func (m *PathItem) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *PathItem) GetSummary() string { + if m != nil { + return m.Summary + } + return "" +} + +func (m *PathItem) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *PathItem) GetGet() *Operation { + if m != nil { + return m.Get + } + return nil +} + +func (m *PathItem) GetPut() *Operation { + if m != nil { + return m.Put + } + return nil +} + +func (m *PathItem) GetPost() *Operation { + if m != nil { + return m.Post + } + return nil +} + +func (m *PathItem) GetDelete() *Operation { + if m != nil { + return m.Delete + } + return nil +} + +func (m *PathItem) GetOptions() *Operation { + if m != nil { + return m.Options + } + return nil +} + +func (m *PathItem) GetHead() *Operation { + if m != nil { + return m.Head + } + return nil +} + +func (m *PathItem) GetPatch() *Operation { + if m != nil { + return m.Patch + } + return nil +} + +func (m *PathItem) GetTrace() *Operation { + if m != nil { + return m.Trace + } + return nil +} + +func (m *PathItem) GetServers() []*Server { + if m != nil { + return m.Servers + } + return nil +} + +func (m *PathItem) GetParameters() []*ParameterOrReference { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *PathItem) 
GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints. +type Paths struct { + Path []*NamedPathItem `protobuf:"bytes,1,rep,name=path" json:"path,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,2,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Paths) Reset() { *m = Paths{} } +func (m *Paths) String() string { return proto.CompactTextString(m) } +func (*Paths) ProtoMessage() {} +func (*Paths) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{55} } + +func (m *Paths) GetPath() []*NamedPathItem { + if m != nil { + return m.Path + } + return nil +} + +func (m *Paths) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type Properties struct { + AdditionalProperties []*NamedSchemaOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Properties) Reset() { *m = Properties{} } +func (m *Properties) String() string { return proto.CompactTextString(m) } +func (*Properties) ProtoMessage() {} +func (*Properties) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{56} } + +func (m *Properties) GetAdditionalProperties() []*NamedSchemaOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification. +type Reference struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` +} + +func (m *Reference) Reset() { *m = Reference{} } +func (m *Reference) String() string { return proto.CompactTextString(m) } +func (*Reference) ProtoMessage() {} +func (*Reference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{57} } + +func (m *Reference) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +type RequestBodiesOrReferences struct { + AdditionalProperties []*NamedRequestBodyOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *RequestBodiesOrReferences) Reset() { *m = RequestBodiesOrReferences{} } +func (m *RequestBodiesOrReferences) String() string { return proto.CompactTextString(m) } +func (*RequestBodiesOrReferences) ProtoMessage() {} +func (*RequestBodiesOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{58} } + +func (m *RequestBodiesOrReferences) GetAdditionalProperties() []*NamedRequestBodyOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Describes a single request body. 
+type RequestBody struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Content *MediaTypes `protobuf:"bytes,2,opt,name=content" json:"content,omitempty"` + Required bool `protobuf:"varint,3,opt,name=required" json:"required,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,4,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *RequestBody) Reset() { *m = RequestBody{} } +func (m *RequestBody) String() string { return proto.CompactTextString(m) } +func (*RequestBody) ProtoMessage() {} +func (*RequestBody) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{59} } + +func (m *RequestBody) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *RequestBody) GetContent() *MediaTypes { + if m != nil { + return m.Content + } + return nil +} + +func (m *RequestBody) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *RequestBody) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type RequestBodyOrReference struct { + // Types that are valid to be assigned to Oneof: + // *RequestBodyOrReference_RequestBody + // *RequestBodyOrReference_Reference + Oneof isRequestBodyOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *RequestBodyOrReference) Reset() { *m = RequestBodyOrReference{} } +func (m *RequestBodyOrReference) String() string { return proto.CompactTextString(m) } +func (*RequestBodyOrReference) ProtoMessage() {} +func (*RequestBodyOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{60} } + +type isRequestBodyOrReference_Oneof interface { + isRequestBodyOrReference_Oneof() +} + +type RequestBodyOrReference_RequestBody struct { + RequestBody *RequestBody `protobuf:"bytes,1,opt,name=request_body,json=requestBody,oneof"` +} +type RequestBodyOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*RequestBodyOrReference_RequestBody) isRequestBodyOrReference_Oneof() {} +func (*RequestBodyOrReference_Reference) isRequestBodyOrReference_Oneof() {} + +func (m *RequestBodyOrReference) GetOneof() isRequestBodyOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *RequestBodyOrReference) GetRequestBody() *RequestBody { + if x, ok := m.GetOneof().(*RequestBodyOrReference_RequestBody); ok { + return x.RequestBody + } + return nil +} + +func (m *RequestBodyOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*RequestBodyOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RequestBodyOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RequestBodyOrReference_OneofMarshaler, _RequestBodyOrReference_OneofUnmarshaler, _RequestBodyOrReference_OneofSizer, []interface{}{ + (*RequestBodyOrReference_RequestBody)(nil), + (*RequestBodyOrReference_Reference)(nil), + } +} + +func _RequestBodyOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RequestBodyOrReference) + // oneof + switch x := m.Oneof.(type) { + case *RequestBodyOrReference_RequestBody: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RequestBody); err != nil { + return err + } + case *RequestBodyOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RequestBodyOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _RequestBodyOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RequestBodyOrReference) + switch tag { + case 1: // oneof.request_body + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RequestBody) + err := b.DecodeMessage(msg) + m.Oneof = &RequestBodyOrReference_RequestBody{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &RequestBodyOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _RequestBodyOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RequestBodyOrReference) + // oneof + switch x := m.Oneof.(type) { + case *RequestBodyOrReference_RequestBody: + s := proto.Size(x.RequestBody) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *RequestBodyOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describes a single response from an API Operation, including design-time, static `links` to operations based on the response. 
+type Response struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` + Headers *HeadersOrReferences `protobuf:"bytes,2,opt,name=headers" json:"headers,omitempty"` + Content *MediaTypes `protobuf:"bytes,3,opt,name=content" json:"content,omitempty"` + Links *LinksOrReferences `protobuf:"bytes,4,opt,name=links" json:"links,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,5,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Response) Reset() { *m = Response{} } +func (m *Response) String() string { return proto.CompactTextString(m) } +func (*Response) ProtoMessage() {} +func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{61} } + +func (m *Response) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Response) GetHeaders() *HeadersOrReferences { + if m != nil { + return m.Headers + } + return nil +} + +func (m *Response) GetContent() *MediaTypes { + if m != nil { + return m.Content + } + return nil +} + +func (m *Response) GetLinks() *LinksOrReferences { + if m != nil { + return m.Links + } + return nil +} + +func (m *Response) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type ResponseOrReference struct { + // Types that are valid to be assigned to Oneof: + // *ResponseOrReference_Response + // *ResponseOrReference_Reference + Oneof isResponseOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *ResponseOrReference) Reset() { *m = ResponseOrReference{} } +func (m *ResponseOrReference) String() string { return proto.CompactTextString(m) } +func (*ResponseOrReference) ProtoMessage() {} +func (*ResponseOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{62} } + +type isResponseOrReference_Oneof interface { + isResponseOrReference_Oneof() +} + +type ResponseOrReference_Response struct { + Response *Response `protobuf:"bytes,1,opt,name=response,oneof"` +} +type ResponseOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*ResponseOrReference_Response) isResponseOrReference_Oneof() {} +func (*ResponseOrReference_Reference) isResponseOrReference_Oneof() {} + +func (m *ResponseOrReference) GetOneof() isResponseOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *ResponseOrReference) GetResponse() *Response { + if x, ok := m.GetOneof().(*ResponseOrReference_Response); ok { + return x.Response + } + return nil +} + +func (m *ResponseOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*ResponseOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ResponseOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ResponseOrReference_OneofMarshaler, _ResponseOrReference_OneofUnmarshaler, _ResponseOrReference_OneofSizer, []interface{}{ + (*ResponseOrReference_Response)(nil), + (*ResponseOrReference_Reference)(nil), + } +} + +func _ResponseOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ResponseOrReference) + // oneof + switch x := m.Oneof.(type) { + case *ResponseOrReference_Response: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Response); err != nil { + return err + } + case *ResponseOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ResponseOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _ResponseOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ResponseOrReference) + switch tag { + case 1: // oneof.response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Response) + err := b.DecodeMessage(msg) + m.Oneof = &ResponseOrReference_Response{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &ResponseOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _ResponseOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ResponseOrReference) + // oneof + switch x := m.Oneof.(type) { + case *ResponseOrReference_Response: + s := proto.Size(x.Response) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *ResponseOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call. 
+type Responses struct { + Default *ResponseOrReference `protobuf:"bytes,1,opt,name=default" json:"default,omitempty"` + ResponseOrReference []*NamedResponseOrReference `protobuf:"bytes,2,rep,name=response_or_reference,json=responseOrReference" json:"response_or_reference,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,3,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Responses) Reset() { *m = Responses{} } +func (m *Responses) String() string { return proto.CompactTextString(m) } +func (*Responses) ProtoMessage() {} +func (*Responses) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{63} } + +func (m *Responses) GetDefault() *ResponseOrReference { + if m != nil { + return m.Default + } + return nil +} + +func (m *Responses) GetResponseOrReference() []*NamedResponseOrReference { + if m != nil { + return m.ResponseOrReference + } + return nil +} + +func (m *Responses) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type ResponsesOrReferences struct { + AdditionalProperties []*NamedResponseOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *ResponsesOrReferences) Reset() { *m = ResponsesOrReferences{} } +func (m *ResponsesOrReferences) String() string { return proto.CompactTextString(m) } +func (*ResponsesOrReferences) ProtoMessage() {} +func (*ResponsesOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{64} } + +func (m *ResponsesOrReferences) GetAdditionalProperties() []*NamedResponseOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. Unless stated otherwise, the property definitions follow the JSON Schema. 
+type Schema struct { + Nullable bool `protobuf:"varint,1,opt,name=nullable" json:"nullable,omitempty"` + Discriminator *Discriminator `protobuf:"bytes,2,opt,name=discriminator" json:"discriminator,omitempty"` + ReadOnly bool `protobuf:"varint,3,opt,name=read_only,json=readOnly" json:"read_only,omitempty"` + WriteOnly bool `protobuf:"varint,4,opt,name=write_only,json=writeOnly" json:"write_only,omitempty"` + Xml *Xml `protobuf:"bytes,5,opt,name=xml" json:"xml,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,6,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + Example *Any `protobuf:"bytes,7,opt,name=example" json:"example,omitempty"` + Deprecated bool `protobuf:"varint,8,opt,name=deprecated" json:"deprecated,omitempty"` + Title string `protobuf:"bytes,9,opt,name=title" json:"title,omitempty"` + MultipleOf float64 `protobuf:"fixed64,10,opt,name=multiple_of,json=multipleOf" json:"multiple_of,omitempty"` + Maximum float64 `protobuf:"fixed64,11,opt,name=maximum" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,13,opt,name=minimum" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,14,opt,name=exclusive_minimum,json=exclusiveMinimum" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,15,opt,name=max_length,json=maxLength" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,16,opt,name=min_length,json=minLength" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,17,opt,name=pattern" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,18,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,19,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,20,opt,name=unique_items,json=uniqueItems" json:"unique_items,omitempty"` + MaxProperties int64 `protobuf:"varint,21,opt,name=max_properties,json=maxProperties" json:"max_properties,omitempty"` + MinProperties int64 `protobuf:"varint,22,opt,name=min_properties,json=minProperties" json:"min_properties,omitempty"` + Required []string `protobuf:"bytes,23,rep,name=required" json:"required,omitempty"` + Enum []*Any `protobuf:"bytes,24,rep,name=enum" json:"enum,omitempty"` + Type string `protobuf:"bytes,25,opt,name=type" json:"type,omitempty"` + AllOf []*SchemaOrReference `protobuf:"bytes,26,rep,name=all_of,json=allOf" json:"all_of,omitempty"` + OneOf []*SchemaOrReference `protobuf:"bytes,27,rep,name=one_of,json=oneOf" json:"one_of,omitempty"` + AnyOf []*SchemaOrReference `protobuf:"bytes,28,rep,name=any_of,json=anyOf" json:"any_of,omitempty"` + Not *Schema `protobuf:"bytes,29,opt,name=not" json:"not,omitempty"` + Items *ItemsItem `protobuf:"bytes,30,opt,name=items" json:"items,omitempty"` + Properties *Properties `protobuf:"bytes,31,opt,name=properties" json:"properties,omitempty"` + AdditionalProperties *AdditionalPropertiesItem `protobuf:"bytes,32,opt,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` + Default *DefaultType `protobuf:"bytes,33,opt,name=default" json:"default,omitempty"` + Description string `protobuf:"bytes,34,opt,name=description" json:"description,omitempty"` + Format string `protobuf:"bytes,35,opt,name=format" json:"format,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,36,rep,name=specification_extension,json=specificationExtension" 
json:"specification_extension,omitempty"` +} + +func (m *Schema) Reset() { *m = Schema{} } +func (m *Schema) String() string { return proto.CompactTextString(m) } +func (*Schema) ProtoMessage() {} +func (*Schema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{65} } + +func (m *Schema) GetNullable() bool { + if m != nil { + return m.Nullable + } + return false +} + +func (m *Schema) GetDiscriminator() *Discriminator { + if m != nil { + return m.Discriminator + } + return nil +} + +func (m *Schema) GetReadOnly() bool { + if m != nil { + return m.ReadOnly + } + return false +} + +func (m *Schema) GetWriteOnly() bool { + if m != nil { + return m.WriteOnly + } + return false +} + +func (m *Schema) GetXml() *Xml { + if m != nil { + return m.Xml + } + return nil +} + +func (m *Schema) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Schema) GetExample() *Any { + if m != nil { + return m.Example + } + return nil +} + +func (m *Schema) GetDeprecated() bool { + if m != nil { + return m.Deprecated + } + return false +} + +func (m *Schema) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Schema) GetMultipleOf() float64 { + if m != nil { + return m.MultipleOf + } + return 0 +} + +func (m *Schema) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *Schema) GetExclusiveMaximum() bool { + if m != nil { + return m.ExclusiveMaximum + } + return false +} + +func (m *Schema) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *Schema) GetExclusiveMinimum() bool { + if m != nil { + return m.ExclusiveMinimum + } + return false +} + +func (m *Schema) GetMaxLength() int64 { + if m != nil { + return m.MaxLength + } + return 0 +} + +func (m *Schema) GetMinLength() int64 { + if m != nil { + return m.MinLength + } + return 0 +} + +func (m *Schema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *Schema) GetMaxItems() int64 { + if m != nil { + return m.MaxItems + } + return 0 +} + +func (m *Schema) GetMinItems() int64 { + if m != nil { + return m.MinItems + } + return 0 +} + +func (m *Schema) GetUniqueItems() bool { + if m != nil { + return m.UniqueItems + } + return false +} + +func (m *Schema) GetMaxProperties() int64 { + if m != nil { + return m.MaxProperties + } + return 0 +} + +func (m *Schema) GetMinProperties() int64 { + if m != nil { + return m.MinProperties + } + return 0 +} + +func (m *Schema) GetRequired() []string { + if m != nil { + return m.Required + } + return nil +} + +func (m *Schema) GetEnum() []*Any { + if m != nil { + return m.Enum + } + return nil +} + +func (m *Schema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Schema) GetAllOf() []*SchemaOrReference { + if m != nil { + return m.AllOf + } + return nil +} + +func (m *Schema) GetOneOf() []*SchemaOrReference { + if m != nil { + return m.OneOf + } + return nil +} + +func (m *Schema) GetAnyOf() []*SchemaOrReference { + if m != nil { + return m.AnyOf + } + return nil +} + +func (m *Schema) GetNot() *Schema { + if m != nil { + return m.Not + } + return nil +} + +func (m *Schema) GetItems() *ItemsItem { + if m != nil { + return m.Items + } + return nil +} + +func (m *Schema) GetProperties() *Properties { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Schema) GetAdditionalProperties() *AdditionalPropertiesItem { + if m != nil { + return m.AdditionalProperties + } + return nil +} + 
+func (m *Schema) GetDefault() *DefaultType { + if m != nil { + return m.Default + } + return nil +} + +func (m *Schema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Schema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *Schema) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type SchemaOrReference struct { + // Types that are valid to be assigned to Oneof: + // *SchemaOrReference_Schema + // *SchemaOrReference_Reference + Oneof isSchemaOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *SchemaOrReference) Reset() { *m = SchemaOrReference{} } +func (m *SchemaOrReference) String() string { return proto.CompactTextString(m) } +func (*SchemaOrReference) ProtoMessage() {} +func (*SchemaOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{66} } + +type isSchemaOrReference_Oneof interface { + isSchemaOrReference_Oneof() +} + +type SchemaOrReference_Schema struct { + Schema *Schema `protobuf:"bytes,1,opt,name=schema,oneof"` +} +type SchemaOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*SchemaOrReference_Schema) isSchemaOrReference_Oneof() {} +func (*SchemaOrReference_Reference) isSchemaOrReference_Oneof() {} + +func (m *SchemaOrReference) GetOneof() isSchemaOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *SchemaOrReference) GetSchema() *Schema { + if x, ok := m.GetOneof().(*SchemaOrReference_Schema); ok { + return x.Schema + } + return nil +} + +func (m *SchemaOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*SchemaOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SchemaOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SchemaOrReference_OneofMarshaler, _SchemaOrReference_OneofUnmarshaler, _SchemaOrReference_OneofSizer, []interface{}{ + (*SchemaOrReference_Schema)(nil), + (*SchemaOrReference_Reference)(nil), + } +} + +func _SchemaOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SchemaOrReference) + // oneof + switch x := m.Oneof.(type) { + case *SchemaOrReference_Schema: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Schema); err != nil { + return err + } + case *SchemaOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SchemaOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _SchemaOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SchemaOrReference) + switch tag { + case 1: // oneof.schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Schema) + err := b.DecodeMessage(msg) + m.Oneof = &SchemaOrReference_Schema{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &SchemaOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _SchemaOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SchemaOrReference) + // oneof + switch x := m.Oneof.(type) { + case *SchemaOrReference_Schema: + s := proto.Size(x.Schema) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SchemaOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SchemasOrReferences struct { + AdditionalProperties []*NamedSchemaOrReference `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *SchemasOrReferences) Reset() { *m = SchemasOrReferences{} } +func (m *SchemasOrReferences) String() string { return proto.CompactTextString(m) } +func (*SchemasOrReferences) ProtoMessage() {} +func (*SchemasOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{67} } + +func (m *SchemasOrReferences) GetAdditionalProperties() []*NamedSchemaOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. When a list of Security Requirement Objects is defined on the Open API object or Operation Object, only one of Security Requirement Objects in the list needs to be satisfied to authorize the request. 
+type SecurityRequirement struct { +} + +func (m *SecurityRequirement) Reset() { *m = SecurityRequirement{} } +func (m *SecurityRequirement) String() string { return proto.CompactTextString(m) } +func (*SecurityRequirement) ProtoMessage() {} +func (*SecurityRequirement) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{68} } + +// Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect Discovery. +type SecurityScheme struct { + Type string `protobuf:"bytes,1,opt,name=type" json:"type,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"` + In string `protobuf:"bytes,4,opt,name=in" json:"in,omitempty"` + Scheme string `protobuf:"bytes,5,opt,name=scheme" json:"scheme,omitempty"` + BearerFormat string `protobuf:"bytes,6,opt,name=bearer_format,json=bearerFormat" json:"bearer_format,omitempty"` + Flows *OauthFlows `protobuf:"bytes,7,opt,name=flows" json:"flows,omitempty"` + OpenIdConnectUrl string `protobuf:"bytes,8,opt,name=open_id_connect_url,json=openIdConnectUrl" json:"open_id_connect_url,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,9,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *SecurityScheme) Reset() { *m = SecurityScheme{} } +func (m *SecurityScheme) String() string { return proto.CompactTextString(m) } +func (*SecurityScheme) ProtoMessage() {} +func (*SecurityScheme) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{69} } + +func (m *SecurityScheme) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *SecurityScheme) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *SecurityScheme) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SecurityScheme) GetIn() string { + if m != nil { + return m.In + } + return "" +} + +func (m *SecurityScheme) GetScheme() string { + if m != nil { + return m.Scheme + } + return "" +} + +func (m *SecurityScheme) GetBearerFormat() string { + if m != nil { + return m.BearerFormat + } + return "" +} + +func (m *SecurityScheme) GetFlows() *OauthFlows { + if m != nil { + return m.Flows + } + return nil +} + +func (m *SecurityScheme) GetOpenIdConnectUrl() string { + if m != nil { + return m.OpenIdConnectUrl + } + return "" +} + +func (m *SecurityScheme) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type SecuritySchemeOrReference struct { + // Types that are valid to be assigned to Oneof: + // *SecuritySchemeOrReference_SecurityScheme + // *SecuritySchemeOrReference_Reference + Oneof isSecuritySchemeOrReference_Oneof `protobuf_oneof:"oneof"` +} + +func (m *SecuritySchemeOrReference) Reset() { *m = SecuritySchemeOrReference{} } +func (m *SecuritySchemeOrReference) String() string { return proto.CompactTextString(m) } +func (*SecuritySchemeOrReference) ProtoMessage() {} +func (*SecuritySchemeOrReference) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{70} } + +type isSecuritySchemeOrReference_Oneof interface { + isSecuritySchemeOrReference_Oneof() +} + +type SecuritySchemeOrReference_SecurityScheme struct { + SecurityScheme 
*SecurityScheme `protobuf:"bytes,1,opt,name=security_scheme,json=securityScheme,oneof"` +} +type SecuritySchemeOrReference_Reference struct { + Reference *Reference `protobuf:"bytes,2,opt,name=reference,oneof"` +} + +func (*SecuritySchemeOrReference_SecurityScheme) isSecuritySchemeOrReference_Oneof() {} +func (*SecuritySchemeOrReference_Reference) isSecuritySchemeOrReference_Oneof() {} + +func (m *SecuritySchemeOrReference) GetOneof() isSecuritySchemeOrReference_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *SecuritySchemeOrReference) GetSecurityScheme() *SecurityScheme { + if x, ok := m.GetOneof().(*SecuritySchemeOrReference_SecurityScheme); ok { + return x.SecurityScheme + } + return nil +} + +func (m *SecuritySchemeOrReference) GetReference() *Reference { + if x, ok := m.GetOneof().(*SecuritySchemeOrReference_Reference); ok { + return x.Reference + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SecuritySchemeOrReference) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SecuritySchemeOrReference_OneofMarshaler, _SecuritySchemeOrReference_OneofUnmarshaler, _SecuritySchemeOrReference_OneofSizer, []interface{}{ + (*SecuritySchemeOrReference_SecurityScheme)(nil), + (*SecuritySchemeOrReference_Reference)(nil), + } +} + +func _SecuritySchemeOrReference_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SecuritySchemeOrReference) + // oneof + switch x := m.Oneof.(type) { + case *SecuritySchemeOrReference_SecurityScheme: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SecurityScheme); err != nil { + return err + } + case *SecuritySchemeOrReference_Reference: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Reference); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SecuritySchemeOrReference.Oneof has unexpected type %T", x) + } + return nil +} + +func _SecuritySchemeOrReference_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SecuritySchemeOrReference) + switch tag { + case 1: // oneof.security_scheme + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SecurityScheme) + err := b.DecodeMessage(msg) + m.Oneof = &SecuritySchemeOrReference_SecurityScheme{msg} + return true, err + case 2: // oneof.reference + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Reference) + err := b.DecodeMessage(msg) + m.Oneof = &SecuritySchemeOrReference_Reference{msg} + return true, err + default: + return false, nil + } +} + +func _SecuritySchemeOrReference_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SecuritySchemeOrReference) + // oneof + switch x := m.Oneof.(type) { + case *SecuritySchemeOrReference_SecurityScheme: + s := proto.Size(x.SecurityScheme) + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *SecuritySchemeOrReference_Reference: + s := proto.Size(x.Reference) + n += proto.SizeVarint(2<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SecuritySchemesOrReferences struct { + AdditionalProperties []*NamedSecuritySchemeOrReference 
`protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *SecuritySchemesOrReferences) Reset() { *m = SecuritySchemesOrReferences{} } +func (m *SecuritySchemesOrReferences) String() string { return proto.CompactTextString(m) } +func (*SecuritySchemesOrReferences) ProtoMessage() {} +func (*SecuritySchemesOrReferences) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{71} } + +func (m *SecuritySchemesOrReferences) GetAdditionalProperties() []*NamedSecuritySchemeOrReference { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// An object representing a Server. +type Server struct { + Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + Variables *ServerVariables `protobuf:"bytes,3,opt,name=variables" json:"variables,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,4,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Server) Reset() { *m = Server{} } +func (m *Server) String() string { return proto.CompactTextString(m) } +func (*Server) ProtoMessage() {} +func (*Server) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{72} } + +func (m *Server) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Server) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Server) GetVariables() *ServerVariables { + if m != nil { + return m.Variables + } + return nil +} + +func (m *Server) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// An object representing a Server Variable for server URL template substitution. 
+type ServerVariable struct { + Enum []string `protobuf:"bytes,1,rep,name=enum" json:"enum,omitempty"` + Default string `protobuf:"bytes,2,opt,name=default" json:"default,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,4,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *ServerVariable) Reset() { *m = ServerVariable{} } +func (m *ServerVariable) String() string { return proto.CompactTextString(m) } +func (*ServerVariable) ProtoMessage() {} +func (*ServerVariable) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{73} } + +func (m *ServerVariable) GetEnum() []string { + if m != nil { + return m.Enum + } + return nil +} + +func (m *ServerVariable) GetDefault() string { + if m != nil { + return m.Default + } + return "" +} + +func (m *ServerVariable) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ServerVariable) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +type ServerVariables struct { + AdditionalProperties []*NamedServerVariable `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *ServerVariables) Reset() { *m = ServerVariables{} } +func (m *ServerVariables) String() string { return proto.CompactTextString(m) } +func (*ServerVariables) ProtoMessage() {} +func (*ServerVariables) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{74} } + +func (m *ServerVariables) GetAdditionalProperties() []*NamedServerVariable { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Any property starting with x- is valid. 
+type SpecificationExtension struct { + // Types that are valid to be assigned to Oneof: + // *SpecificationExtension_Number + // *SpecificationExtension_Boolean + // *SpecificationExtension_String_ + Oneof isSpecificationExtension_Oneof `protobuf_oneof:"oneof"` +} + +func (m *SpecificationExtension) Reset() { *m = SpecificationExtension{} } +func (m *SpecificationExtension) String() string { return proto.CompactTextString(m) } +func (*SpecificationExtension) ProtoMessage() {} +func (*SpecificationExtension) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{75} } + +type isSpecificationExtension_Oneof interface { + isSpecificationExtension_Oneof() +} + +type SpecificationExtension_Number struct { + Number float64 `protobuf:"fixed64,1,opt,name=number,oneof"` +} +type SpecificationExtension_Boolean struct { + Boolean bool `protobuf:"varint,2,opt,name=boolean,oneof"` +} +type SpecificationExtension_String_ struct { + String_ string `protobuf:"bytes,3,opt,name=string,oneof"` +} + +func (*SpecificationExtension_Number) isSpecificationExtension_Oneof() {} +func (*SpecificationExtension_Boolean) isSpecificationExtension_Oneof() {} +func (*SpecificationExtension_String_) isSpecificationExtension_Oneof() {} + +func (m *SpecificationExtension) GetOneof() isSpecificationExtension_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (m *SpecificationExtension) GetNumber() float64 { + if x, ok := m.GetOneof().(*SpecificationExtension_Number); ok { + return x.Number + } + return 0 +} + +func (m *SpecificationExtension) GetBoolean() bool { + if x, ok := m.GetOneof().(*SpecificationExtension_Boolean); ok { + return x.Boolean + } + return false +} + +func (m *SpecificationExtension) GetString_() string { + if x, ok := m.GetOneof().(*SpecificationExtension_String_); ok { + return x.String_ + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SpecificationExtension) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SpecificationExtension_OneofMarshaler, _SpecificationExtension_OneofUnmarshaler, _SpecificationExtension_OneofSizer, []interface{}{ + (*SpecificationExtension_Number)(nil), + (*SpecificationExtension_Boolean)(nil), + (*SpecificationExtension_String_)(nil), + } +} + +func _SpecificationExtension_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SpecificationExtension) + // oneof + switch x := m.Oneof.(type) { + case *SpecificationExtension_Number: + b.EncodeVarint(1<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.Number)) + case *SpecificationExtension_Boolean: + t := uint64(0) + if x.Boolean { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *SpecificationExtension_String_: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.String_) + case nil: + default: + return fmt.Errorf("SpecificationExtension.Oneof has unexpected type %T", x) + } + return nil +} + +func _SpecificationExtension_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SpecificationExtension) + switch tag { + case 1: // oneof.number + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Oneof = &SpecificationExtension_Number{math.Float64frombits(x)} + return true, err + case 2: // oneof.boolean + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Oneof = &SpecificationExtension_Boolean{x != 0} + return true, err + case 3: // oneof.string + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Oneof = &SpecificationExtension_String_{x} + return true, err + default: + return false, nil + } +} + +func _SpecificationExtension_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SpecificationExtension) + // oneof + switch x := m.Oneof.(type) { + case *SpecificationExtension_Number: + n += proto.SizeVarint(1<<3 | proto.WireFixed64) + n += 8 + case *SpecificationExtension_Boolean: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += 1 + case *SpecificationExtension_String_: + n += proto.SizeVarint(3<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.String_))) + n += len(x.String_) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type StringArray struct { + Value []string `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` +} + +func (m *StringArray) Reset() { *m = StringArray{} } +func (m *StringArray) String() string { return proto.CompactTextString(m) } +func (*StringArray) ProtoMessage() {} +func (*StringArray) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{76} } + +func (m *StringArray) GetValue() []string { + if m != nil { + return m.Value + } + return nil +} + +type Strings struct { + AdditionalProperties []*NamedString `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Strings) Reset() { *m = Strings{} } +func (m *Strings) String() string { return proto.CompactTextString(m) } +func (*Strings) ProtoMessage() {} +func (*Strings) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{77} } + +func (m *Strings) 
GetAdditionalProperties() []*NamedString { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances. +type Tag struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,3,opt,name=external_docs,json=externalDocs" json:"external_docs,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,4,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Tag) Reset() { *m = Tag{} } +func (m *Tag) String() string { return proto.CompactTextString(m) } +func (*Tag) ProtoMessage() {} +func (*Tag) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{78} } + +func (m *Tag) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Tag) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Tag) GetExternalDocs() *ExternalDocs { + if m != nil { + return m.ExternalDocs + } + return nil +} + +func (m *Tag) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +// A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior. +type Xml struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace" json:"namespace,omitempty"` + Prefix string `protobuf:"bytes,3,opt,name=prefix" json:"prefix,omitempty"` + Attribute bool `protobuf:"varint,4,opt,name=attribute" json:"attribute,omitempty"` + Wrapped bool `protobuf:"varint,5,opt,name=wrapped" json:"wrapped,omitempty"` + SpecificationExtension []*NamedAny `protobuf:"bytes,6,rep,name=specification_extension,json=specificationExtension" json:"specification_extension,omitempty"` +} + +func (m *Xml) Reset() { *m = Xml{} } +func (m *Xml) String() string { return proto.CompactTextString(m) } +func (*Xml) ProtoMessage() {} +func (*Xml) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{79} } + +func (m *Xml) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Xml) GetNamespace() string { + if m != nil { + return m.Namespace + } + return "" +} + +func (m *Xml) GetPrefix() string { + if m != nil { + return m.Prefix + } + return "" +} + +func (m *Xml) GetAttribute() bool { + if m != nil { + return m.Attribute + } + return false +} + +func (m *Xml) GetWrapped() bool { + if m != nil { + return m.Wrapped + } + return false +} + +func (m *Xml) GetSpecificationExtension() []*NamedAny { + if m != nil { + return m.SpecificationExtension + } + return nil +} + +func init() { + proto.RegisterType((*AdditionalPropertiesItem)(nil), "openapi.v3.AdditionalPropertiesItem") + proto.RegisterType((*Any)(nil), "openapi.v3.Any") + proto.RegisterType((*AnyOrExpression)(nil), "openapi.v3.AnyOrExpression") + proto.RegisterType((*AnysOrExpressions)(nil), "openapi.v3.AnysOrExpressions") + proto.RegisterType((*Callback)(nil), "openapi.v3.Callback") + proto.RegisterType((*CallbackOrReference)(nil), "openapi.v3.CallbackOrReference") + 
proto.RegisterType((*CallbacksOrReferences)(nil), "openapi.v3.CallbacksOrReferences") + proto.RegisterType((*Components)(nil), "openapi.v3.Components") + proto.RegisterType((*Contact)(nil), "openapi.v3.Contact") + proto.RegisterType((*DefaultType)(nil), "openapi.v3.DefaultType") + proto.RegisterType((*Discriminator)(nil), "openapi.v3.Discriminator") + proto.RegisterType((*Document)(nil), "openapi.v3.Document") + proto.RegisterType((*Encoding)(nil), "openapi.v3.Encoding") + proto.RegisterType((*Encodings)(nil), "openapi.v3.Encodings") + proto.RegisterType((*Example)(nil), "openapi.v3.Example") + proto.RegisterType((*ExampleOrReference)(nil), "openapi.v3.ExampleOrReference") + proto.RegisterType((*Examples)(nil), "openapi.v3.Examples") + proto.RegisterType((*ExamplesOrReferences)(nil), "openapi.v3.ExamplesOrReferences") + proto.RegisterType((*Expression)(nil), "openapi.v3.Expression") + proto.RegisterType((*ExternalDocs)(nil), "openapi.v3.ExternalDocs") + proto.RegisterType((*Header)(nil), "openapi.v3.Header") + proto.RegisterType((*HeaderOrReference)(nil), "openapi.v3.HeaderOrReference") + proto.RegisterType((*HeadersOrReferences)(nil), "openapi.v3.HeadersOrReferences") + proto.RegisterType((*Info)(nil), "openapi.v3.Info") + proto.RegisterType((*ItemsItem)(nil), "openapi.v3.ItemsItem") + proto.RegisterType((*License)(nil), "openapi.v3.License") + proto.RegisterType((*Link)(nil), "openapi.v3.Link") + proto.RegisterType((*LinkOrReference)(nil), "openapi.v3.LinkOrReference") + proto.RegisterType((*LinksOrReferences)(nil), "openapi.v3.LinksOrReferences") + proto.RegisterType((*MediaType)(nil), "openapi.v3.MediaType") + proto.RegisterType((*MediaTypes)(nil), "openapi.v3.MediaTypes") + proto.RegisterType((*NamedAny)(nil), "openapi.v3.NamedAny") + proto.RegisterType((*NamedAnyOrExpression)(nil), "openapi.v3.NamedAnyOrExpression") + proto.RegisterType((*NamedCallbackOrReference)(nil), "openapi.v3.NamedCallbackOrReference") + proto.RegisterType((*NamedEncoding)(nil), "openapi.v3.NamedEncoding") + proto.RegisterType((*NamedExampleOrReference)(nil), "openapi.v3.NamedExampleOrReference") + proto.RegisterType((*NamedHeaderOrReference)(nil), "openapi.v3.NamedHeaderOrReference") + proto.RegisterType((*NamedLinkOrReference)(nil), "openapi.v3.NamedLinkOrReference") + proto.RegisterType((*NamedMediaType)(nil), "openapi.v3.NamedMediaType") + proto.RegisterType((*NamedParameterOrReference)(nil), "openapi.v3.NamedParameterOrReference") + proto.RegisterType((*NamedPathItem)(nil), "openapi.v3.NamedPathItem") + proto.RegisterType((*NamedRequestBodyOrReference)(nil), "openapi.v3.NamedRequestBodyOrReference") + proto.RegisterType((*NamedResponseOrReference)(nil), "openapi.v3.NamedResponseOrReference") + proto.RegisterType((*NamedSchemaOrReference)(nil), "openapi.v3.NamedSchemaOrReference") + proto.RegisterType((*NamedSecuritySchemeOrReference)(nil), "openapi.v3.NamedSecuritySchemeOrReference") + proto.RegisterType((*NamedServerVariable)(nil), "openapi.v3.NamedServerVariable") + proto.RegisterType((*NamedString)(nil), "openapi.v3.NamedString") + proto.RegisterType((*OauthFlow)(nil), "openapi.v3.OauthFlow") + proto.RegisterType((*OauthFlows)(nil), "openapi.v3.OauthFlows") + proto.RegisterType((*Object)(nil), "openapi.v3.Object") + proto.RegisterType((*Operation)(nil), "openapi.v3.Operation") + proto.RegisterType((*Parameter)(nil), "openapi.v3.Parameter") + proto.RegisterType((*ParameterOrReference)(nil), "openapi.v3.ParameterOrReference") + proto.RegisterType((*ParametersOrReferences)(nil), 
"openapi.v3.ParametersOrReferences") + proto.RegisterType((*PathItem)(nil), "openapi.v3.PathItem") + proto.RegisterType((*Paths)(nil), "openapi.v3.Paths") + proto.RegisterType((*Properties)(nil), "openapi.v3.Properties") + proto.RegisterType((*Reference)(nil), "openapi.v3.Reference") + proto.RegisterType((*RequestBodiesOrReferences)(nil), "openapi.v3.RequestBodiesOrReferences") + proto.RegisterType((*RequestBody)(nil), "openapi.v3.RequestBody") + proto.RegisterType((*RequestBodyOrReference)(nil), "openapi.v3.RequestBodyOrReference") + proto.RegisterType((*Response)(nil), "openapi.v3.Response") + proto.RegisterType((*ResponseOrReference)(nil), "openapi.v3.ResponseOrReference") + proto.RegisterType((*Responses)(nil), "openapi.v3.Responses") + proto.RegisterType((*ResponsesOrReferences)(nil), "openapi.v3.ResponsesOrReferences") + proto.RegisterType((*Schema)(nil), "openapi.v3.Schema") + proto.RegisterType((*SchemaOrReference)(nil), "openapi.v3.SchemaOrReference") + proto.RegisterType((*SchemasOrReferences)(nil), "openapi.v3.SchemasOrReferences") + proto.RegisterType((*SecurityRequirement)(nil), "openapi.v3.SecurityRequirement") + proto.RegisterType((*SecurityScheme)(nil), "openapi.v3.SecurityScheme") + proto.RegisterType((*SecuritySchemeOrReference)(nil), "openapi.v3.SecuritySchemeOrReference") + proto.RegisterType((*SecuritySchemesOrReferences)(nil), "openapi.v3.SecuritySchemesOrReferences") + proto.RegisterType((*Server)(nil), "openapi.v3.Server") + proto.RegisterType((*ServerVariable)(nil), "openapi.v3.ServerVariable") + proto.RegisterType((*ServerVariables)(nil), "openapi.v3.ServerVariables") + proto.RegisterType((*SpecificationExtension)(nil), "openapi.v3.SpecificationExtension") + proto.RegisterType((*StringArray)(nil), "openapi.v3.StringArray") + proto.RegisterType((*Strings)(nil), "openapi.v3.Strings") + proto.RegisterType((*Tag)(nil), "openapi.v3.Tag") + proto.RegisterType((*Xml)(nil), "openapi.v3.Xml") +} + +func init() { proto.RegisterFile("OpenAPIv3/OpenAPIv3.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 3544 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xc4, 0x5b, 0x49, 0x6f, 0x1c, 0x47, + 0x96, 0x66, 0xd6, 0x5e, 0xaf, 0xb8, 0x06, 0x17, 0x95, 0x24, 0x4b, 0xa2, 0x48, 0xc9, 0x92, 0xb5, + 0xda, 0x92, 0xe5, 0xb1, 0x61, 0x7b, 0x3c, 0x94, 0x44, 0x83, 0x02, 0x24, 0x97, 0x26, 0x25, 0xdb, + 0x1a, 0x7b, 0x8c, 0x42, 0x30, 0x2b, 0x8a, 0x4c, 0x2b, 0x37, 0x65, 0x66, 0x49, 0xac, 0x39, 0x18, + 0x98, 0x83, 0x81, 0xf1, 0x00, 0x33, 0x98, 0x01, 0x1a, 0xbe, 0xf5, 0xc5, 0xe7, 0x6e, 0x18, 0xfd, + 0x27, 0x1a, 0x30, 0xd0, 0x7d, 0x6a, 0xa0, 0x7f, 0x40, 0x1f, 0xfb, 0xd2, 0x40, 0x1f, 0xfb, 0xd4, + 0x88, 0x2d, 0x97, 0xca, 0xc8, 0x24, 0xab, 0x58, 0x46, 0x9f, 0xc8, 0x8c, 0xf7, 0xc5, 0x8b, 0xed, + 0x6d, 0xf1, 0x5e, 0x14, 0x9c, 0xec, 0x78, 0xc4, 0xd9, 0x7a, 0xfc, 0xe0, 0xe5, 0xed, 0x9b, 0xd1, + 0x7f, 0x37, 0x3c, 0xdf, 0x0d, 0x5d, 0x04, 0xae, 0x47, 0x1c, 0xec, 0x99, 0x37, 0x5e, 0xde, 0x3e, + 0x75, 0x72, 0xcf, 0x75, 0xf7, 0x2c, 0x72, 0x93, 0x51, 0x76, 0x07, 0xfd, 0x9b, 0xd8, 0x19, 0x72, + 0xd8, 0xc6, 0xff, 0x69, 0xd0, 0xde, 0xea, 0xf5, 0xcc, 0xd0, 0x74, 0x1d, 0x6c, 0x3d, 0xf6, 0x5d, + 0x8f, 0xf8, 0xa1, 0x49, 0x82, 0x07, 0x21, 0xb1, 0x51, 0x07, 0x96, 0x03, 0x63, 0x9f, 0xd8, 0xb8, + 0xeb, 0xfa, 0x5d, 0x9f, 0xf4, 0x89, 0x4f, 0x1c, 0x83, 0xb4, 0xb5, 0x75, 0xed, 0x72, 0xeb, 0xd6, + 0x99, 0x1b, 0xf1, 0x08, 0x37, 0x9e, 0x30, 0x58, 0xc7, 0xd7, 0x25, 0x68, 0x67, 0x46, 0x5f, 0x0a, + 0x46, 0x1b, 0xd1, 0x29, 0xa8, 0xef, 0xba, 0xae, 0x45, 0xb0, 0xd3, 
0x2e, 0xad, 0x6b, 0x97, 0x1b, + 0x3b, 0x33, 0xba, 0x6c, 0xb8, 0x5b, 0x87, 0xaa, 0xeb, 0x10, 0xb7, 0xbf, 0xb1, 0x0d, 0xe5, 0x2d, + 0x67, 0x88, 0xae, 0x40, 0xf5, 0x25, 0xb6, 0x06, 0x72, 0xb8, 0x95, 0x1b, 0x7c, 0x11, 0x37, 0xe4, + 0x22, 0x6e, 0x6c, 0x39, 0x43, 0x9d, 0x43, 0x10, 0x82, 0xca, 0x10, 0xdb, 0x16, 0x63, 0xda, 0xd4, + 0xd9, 0xff, 0x1b, 0x43, 0x58, 0xd8, 0x72, 0x86, 0x1d, 0x7f, 0xfb, 0xc0, 0xf3, 0x49, 0x10, 0x98, + 0xae, 0x83, 0x36, 0xa1, 0x8c, 0x9d, 0xa1, 0x60, 0xb8, 0x90, 0x9c, 0xff, 0x96, 0x33, 0xdc, 0x99, + 0xd1, 0x29, 0x15, 0xbd, 0x0b, 0x40, 0xa2, 0x2e, 0x8c, 0x63, 0xeb, 0xd6, 0x5a, 0x12, 0x1b, 0x33, + 0xdc, 0x99, 0xd1, 0x13, 0xd8, 0x78, 0x05, 0x5f, 0xc3, 0xd2, 0x96, 0x33, 0x0c, 0x92, 0x63, 0x07, + 0xe8, 0x53, 0x58, 0xc5, 0xd1, 0x46, 0x77, 0xbd, 0x68, 0xa7, 0xdb, 0xda, 0x7a, 0xf9, 0x72, 0xeb, + 0xd6, 0x7a, 0x72, 0x88, 0x4f, 0xb0, 0x4d, 0x7a, 0x23, 0xb3, 0xd7, 0x57, 0xb0, 0xe2, 0x9c, 0x36, + 0xfe, 0x4b, 0x83, 0xc6, 0x3d, 0x6c, 0x59, 0xbb, 0xd8, 0x78, 0x8e, 0xae, 0x43, 0xc5, 0xc3, 0xe1, + 0xbe, 0x60, 0x79, 0x32, 0xc3, 0xf2, 0x31, 0x0e, 0xf7, 0xe9, 0xc9, 0xea, 0x0c, 0x86, 0x1e, 0xc1, + 0x89, 0xc0, 0x23, 0x86, 0xd9, 0x37, 0x0d, 0x4c, 0x19, 0x77, 0xc9, 0x41, 0x48, 0x1c, 0xb1, 0xee, + 0x32, 0xdb, 0x74, 0xc5, 0xa4, 0xf4, 0xb5, 0x54, 0xa7, 0x6d, 0xd9, 0x67, 0xe3, 0x3b, 0x0d, 0x96, + 0xe5, 0x54, 0x92, 0xa7, 0x7e, 0x0b, 0x1a, 0x86, 0x68, 0x8e, 0x0e, 0x33, 0xc1, 0x57, 0x76, 0xd9, + 0x99, 0xd1, 0x23, 0x1c, 0xba, 0x03, 0xcd, 0x58, 0xe0, 0xf8, 0x21, 0xac, 0x26, 0x3b, 0x25, 0x05, + 0x2d, 0x46, 0xc6, 0x47, 0xe0, 0xc3, 0xaa, 0xe4, 0x1b, 0x24, 0xe6, 0x12, 0xa0, 0x7f, 0x2b, 0x3e, + 0x86, 0x0b, 0x99, 0x15, 0x2b, 0x56, 0x94, 0x73, 0x14, 0xbf, 0xaa, 0x02, 0xdc, 0x73, 0x6d, 0xcf, + 0x75, 0x88, 0x13, 0x06, 0xe8, 0x3d, 0xa8, 0x73, 0x0d, 0x08, 0xc4, 0xaa, 0xcf, 0x65, 0x35, 0x26, + 0x35, 0x37, 0x5d, 0xe2, 0xd1, 0x47, 0x74, 0xf5, 0x81, 0xe7, 0x3a, 0x01, 0x09, 0xc4, 0xea, 0xcf, + 0xa7, 0x57, 0x2f, 0x88, 0xa9, 0xee, 0x71, 0x1f, 0x74, 0x17, 0xc0, 0xc3, 0x3e, 0xb6, 0x49, 0x48, + 0xfc, 0xa0, 0x5d, 0x66, 0x1c, 0x36, 0x92, 0x1c, 0x1e, 0x47, 0xd4, 0x14, 0x8b, 0x44, 0x2f, 0xf4, + 0x01, 0x34, 0xc8, 0x01, 0xb6, 0x3d, 0x8b, 0x04, 0xed, 0x0a, 0xe3, 0xb0, 0x9e, 0x56, 0x03, 0x4e, + 0x4b, 0xf5, 0x8f, 0x7a, 0xa0, 0x87, 0x30, 0xef, 0x93, 0x17, 0x03, 0x12, 0x84, 0xdd, 0x5d, 0xb7, + 0x47, 0x37, 0xb8, 0xca, 0x78, 0x5c, 0x4c, 0xaf, 0x83, 0x21, 0xee, 0x32, 0x40, 0x8a, 0xd1, 0x9c, + 0x9f, 0x24, 0xd1, 0xbd, 0xdc, 0x27, 0xb8, 0x47, 0x17, 0x53, 0xcb, 0xee, 0xe5, 0x0e, 0x27, 0xa5, + 0xf7, 0x52, 0xe0, 0x91, 0x0e, 0x8b, 0x01, 0x31, 0x06, 0xbe, 0x19, 0x0e, 0xbb, 0x6c, 0x7f, 0x49, + 0xd0, 0xae, 0x33, 0x1e, 0x97, 0x52, 0xe7, 0x21, 0x30, 0x4f, 0x38, 0x24, 0xc5, 0x6b, 0x21, 0x48, + 0x13, 0xd1, 0x6d, 0xa8, 0x5a, 0xa6, 0xf3, 0x3c, 0x68, 0x37, 0xb2, 0xa6, 0xf0, 0x21, 0x25, 0xa4, + 0xba, 0x73, 0x2c, 0x3d, 0x54, 0x29, 0xde, 0x41, 0xbb, 0x99, 0x3d, 0x54, 0xa5, 0xbc, 0xea, 0x71, + 0x9f, 0x22, 0x75, 0x85, 0x09, 0xd4, 0xf5, 0x17, 0x1a, 0xd4, 0xef, 0xb9, 0x4e, 0x88, 0x8d, 0x90, + 0x1a, 0x50, 0x07, 0xdb, 0xdc, 0xd6, 0x36, 0x75, 0xf6, 0x3f, 0x5a, 0x84, 0xf2, 0xc0, 0x97, 0x36, + 0x95, 0xfe, 0x8b, 0x56, 0xa0, 0x4a, 0x6c, 0x6c, 0x5a, 0x4c, 0xa0, 0x9a, 0x3a, 0xff, 0x28, 0x9a, + 0x56, 0x65, 0x82, 0x69, 0xf5, 0xa1, 0x75, 0x9f, 0xf4, 0xf1, 0xc0, 0x0a, 0x9f, 0x0e, 0x3d, 0x82, + 0xda, 0x50, 0x73, 0x06, 0xf6, 0x2e, 0xf1, 0xd9, 0xdc, 0xb4, 0x9d, 0x19, 0x5d, 0x7c, 0x17, 0x39, + 0x13, 0xda, 0x2b, 0x08, 0x7d, 0xd3, 0xd9, 0xe3, 0x53, 0xa5, 0xbd, 0xf8, 0x77, 0x6c, 0x21, 0x0c, + 0x98, 0xbb, 0x6f, 0x06, 0x86, 0x6f, 0xda, 0xa6, 0x83, 0x43, 0xd7, 0x47, 0x9b, 0x30, 0x27, 
0xcc, + 0xc1, 0xb0, 0x9b, 0xd8, 0x8c, 0x59, 0xd9, 0x48, 0x67, 0x8e, 0xae, 0x43, 0xdd, 0xc6, 0x9e, 0x47, + 0x39, 0x73, 0xbd, 0x5c, 0x4e, 0x09, 0x11, 0x1b, 0x23, 0xd0, 0x25, 0x66, 0xe3, 0xa7, 0x32, 0x34, + 0xee, 0xbb, 0xc6, 0xc0, 0x26, 0x4e, 0x88, 0xda, 0x50, 0x17, 0x58, 0xc1, 0x5a, 0x7e, 0xa2, 0x0b, + 0x50, 0x31, 0x9d, 0xbe, 0x2b, 0x58, 0x2e, 0x26, 0x59, 0x3e, 0x70, 0xfa, 0xae, 0xce, 0xa8, 0xe8, + 0x1a, 0xd4, 0x03, 0xe2, 0xbf, 0xe4, 0x1a, 0x4d, 0x37, 0x16, 0xa5, 0x05, 0x98, 0x92, 0x74, 0x09, + 0x41, 0x97, 0xa0, 0x4a, 0x8d, 0xbc, 0xd4, 0xdd, 0xa5, 0xb4, 0xf6, 0x87, 0xfb, 0x81, 0xce, 0xe9, + 0xe8, 0x1d, 0x00, 0x23, 0xb2, 0x5a, 0x42, 0x4b, 0x53, 0x0e, 0x2f, 0xb6, 0x69, 0x7a, 0x02, 0x89, + 0xde, 0x87, 0x86, 0xd4, 0x8b, 0x76, 0x8d, 0xcd, 0xe7, 0x9c, 0x4a, 0xa1, 0xa8, 0x8e, 0x9b, 0x3e, + 0xa1, 0x3b, 0xa0, 0x47, 0x1d, 0xd0, 0x26, 0x54, 0x42, 0xbc, 0x47, 0x35, 0xb1, 0x3c, 0xea, 0x8b, + 0x9f, 0xe2, 0x3d, 0x9d, 0x11, 0xd1, 0x87, 0x30, 0x47, 0x65, 0xc9, 0xa7, 0x96, 0xba, 0xe7, 0x1a, + 0x52, 0xdd, 0xda, 0x69, 0x33, 0xc4, 0x01, 0xf7, 0x5d, 0x23, 0xd0, 0x67, 0x49, 0xe2, 0xab, 0x48, + 0x30, 0x9b, 0x13, 0x08, 0xe6, 0xff, 0x96, 0xa0, 0xb1, 0xed, 0x18, 0x6e, 0xcf, 0x74, 0xf6, 0xd0, + 0x79, 0x98, 0x35, 0x5c, 0x27, 0x24, 0x4e, 0xd8, 0x0d, 0x87, 0x9e, 0x94, 0x95, 0x96, 0x68, 0x63, + 0x92, 0x9b, 0xb0, 0x59, 0xa5, 0x31, 0x6d, 0xd6, 0x0a, 0x54, 0x83, 0x70, 0x68, 0x11, 0xa9, 0x68, + 0xec, 0x83, 0xca, 0x0f, 0x39, 0xf0, 0x2c, 0xb7, 0x47, 0xd8, 0x99, 0x36, 0x74, 0xf9, 0x89, 0x2e, + 0xc2, 0x3c, 0xb6, 0x2c, 0xf7, 0x55, 0xd7, 0x27, 0xec, 0xf8, 0x7b, 0xec, 0x18, 0x1b, 0xfa, 0x1c, + 0x6b, 0xd5, 0x45, 0x63, 0xd1, 0x86, 0xd4, 0x26, 0xd8, 0x90, 0x2f, 0xa1, 0x29, 0xf7, 0x23, 0x40, + 0x9f, 0x14, 0xfb, 0xd5, 0x6c, 0x2c, 0x22, 0xbb, 0xe6, 0x38, 0xd3, 0x3f, 0x69, 0x50, 0x17, 0x2e, + 0x86, 0x2e, 0x3c, 0x18, 0xd8, 0x36, 0xf6, 0x87, 0x52, 0x71, 0xc4, 0x27, 0x5a, 0x87, 0x56, 0x8f, + 0x50, 0x25, 0xf6, 0x42, 0x19, 0xad, 0x35, 0xf5, 0x64, 0x13, 0xba, 0x28, 0xc3, 0xc8, 0xb2, 0x32, + 0xea, 0x93, 0x11, 0xe4, 0x45, 0x98, 0x8f, 0x44, 0x8d, 0xe3, 0x2b, 0x8c, 0x57, 0x24, 0x80, 0x9f, + 0x31, 0x58, 0xc1, 0x0e, 0x56, 0x27, 0xd8, 0xc1, 0x6f, 0x35, 0x40, 0x62, 0x91, 0xc9, 0x80, 0xe9, + 0x26, 0x3d, 0x68, 0xd6, 0x2a, 0x22, 0x87, 0x65, 0x85, 0xe3, 0xa5, 0xe6, 0x4e, 0xa0, 0x8e, 0x1d, + 0x2d, 0x01, 0x34, 0xa4, 0x3b, 0xdf, 0xf0, 0x60, 0x45, 0xe5, 0xda, 0xd1, 0xb3, 0xe2, 0x03, 0xde, + 0xcc, 0x1e, 0x70, 0x66, 0x61, 0x39, 0x47, 0xfd, 0x39, 0x40, 0x22, 0x48, 0x7f, 0x50, 0x3c, 0x8e, + 0x7a, 0x83, 0xd5, 0x8c, 0xff, 0x5f, 0x83, 0xd9, 0xa4, 0x7d, 0x18, 0x15, 0x17, 0x2d, 0x2b, 0x2e, + 0x59, 0xa7, 0x57, 0x70, 0xe4, 0xe5, 0x09, 0x8e, 0xfc, 0xbf, 0x2b, 0x50, 0xe3, 0xba, 0x7f, 0x84, + 0xd9, 0x9c, 0x82, 0x86, 0xcf, 0xcd, 0x67, 0x8f, 0xfb, 0x38, 0x3d, 0xfa, 0x46, 0x67, 0x01, 0x7a, + 0xc4, 0xf3, 0x89, 0x81, 0x43, 0xd2, 0x63, 0xd2, 0xdd, 0xd0, 0x13, 0x2d, 0xe8, 0x0a, 0x2c, 0x71, + 0x9b, 0x40, 0x6c, 0x2f, 0x1c, 0x26, 0x84, 0xba, 0xa1, 0x2f, 0x30, 0xc2, 0x36, 0x6d, 0xe7, 0x62, + 0x1d, 0xd9, 0x9b, 0x6a, 0x8e, 0xbd, 0xa9, 0x1d, 0x66, 0x6f, 0xea, 0x2a, 0x7b, 0x73, 0x07, 0x6a, + 0x3c, 0xa2, 0x55, 0xc5, 0x49, 0x99, 0x2b, 0xa3, 0x2e, 0xc0, 0xe8, 0x8d, 0x58, 0xfc, 0x9b, 0x6a, + 0xa5, 0x8d, 0x04, 0x3f, 0x19, 0xa3, 0xc2, 0xd8, 0x31, 0xea, 0x9b, 0x50, 0x17, 0x06, 0xbb, 0xdd, + 0xca, 0xba, 0xbd, 0x47, 0xa4, 0x67, 0x62, 0x6a, 0xc9, 0x03, 0x5d, 0xc2, 0x8a, 0x84, 0x61, 0x76, + 0x02, 0x61, 0xf8, 0x4f, 0x0d, 0x96, 0xb8, 0x30, 0x24, 0xd5, 0xff, 0x1a, 0xd4, 0xb8, 0x23, 0x10, + 0xda, 0x8f, 0xb2, 0x7e, 0x83, 0x06, 0x34, 0x1c, 0x73, 0x6c, 0xdd, 0x77, 0x60, 0x59, 0xe1, 0x8b, + 0xd0, 0xe7, 0xc5, 
0x6a, 0xb8, 0x91, 0x59, 0x67, 0x66, 0x1d, 0x39, 0x4a, 0xf9, 0x63, 0x09, 0x2a, + 0x34, 0xa8, 0xa1, 0x42, 0x17, 0x9a, 0xa1, 0x25, 0x7d, 0x27, 0xff, 0x38, 0x82, 0x45, 0xbf, 0x0c, + 0x8b, 0x21, 0xf1, 0xed, 0xa0, 0xeb, 0xf6, 0xbb, 0x54, 0xd0, 0x4c, 0x43, 0xfa, 0xc9, 0x79, 0xd6, + 0xde, 0xe9, 0x3f, 0xe1, 0xad, 0x34, 0x58, 0x33, 0x78, 0x80, 0x2b, 0x82, 0xa0, 0xe5, 0x74, 0x58, + 0xc3, 0x48, 0xba, 0xc4, 0x50, 0xb8, 0x65, 0x1a, 0xc4, 0x09, 0x88, 0x88, 0x82, 0x96, 0xd3, 0x71, + 0x3d, 0x23, 0xe9, 0x12, 0x43, 0xd5, 0x83, 0x06, 0x5a, 0xdc, 0x7b, 0x32, 0xaf, 0x24, 0x3e, 0x8b, + 0xa4, 0xa4, 0x3e, 0x81, 0x94, 0x7c, 0x01, 0x4d, 0x7a, 0x69, 0xe7, 0x39, 0x99, 0x47, 0x79, 0x39, + 0x99, 0xf2, 0xe1, 0x0a, 0x96, 0xcd, 0xc8, 0x6c, 0x7c, 0x03, 0x75, 0xb1, 0xb0, 0x23, 0xde, 0x01, + 0xa6, 0x6c, 0x0e, 0xff, 0x5c, 0x82, 0x0a, 0xbd, 0x31, 0xd1, 0xe8, 0x9b, 0x8a, 0x08, 0xe7, 0xe9, + 0x93, 0xbe, 0x8c, 0xbe, 0xa3, 0x46, 0x9d, 0xf4, 0x69, 0xd4, 0x15, 0x83, 0xcc, 0x9e, 0x94, 0x8e, + 0xa8, 0xed, 0x41, 0x0f, 0x7d, 0xa8, 0xb8, 0xf9, 0x9e, 0x19, 0xb1, 0x1f, 0xe9, 0xcc, 0x4c, 0xea, + 0xd2, 0xfb, 0xcf, 0x30, 0x9b, 0xb8, 0xb6, 0x0e, 0x85, 0xdc, 0x9c, 0x1e, 0x61, 0x90, 0xca, 0xcb, + 0xb4, 0xe2, 0xab, 0x6a, 0x26, 0x20, 0xa9, 0x66, 0xc5, 0xf7, 0x0a, 0xd4, 0x78, 0x88, 0x2e, 0x6e, + 0xb2, 0xaa, 0x20, 0x5e, 0x20, 0xa6, 0x2d, 0x48, 0x43, 0x58, 0xa0, 0x7b, 0x9d, 0xb4, 0x35, 0xaf, + 0x43, 0x85, 0xde, 0x4e, 0x85, 0xa5, 0x59, 0x1c, 0xbd, 0xc8, 0xee, 0xcc, 0xe8, 0x8c, 0x7e, 0x6c, + 0x2b, 0xf3, 0x35, 0x2c, 0x65, 0x2e, 0xc6, 0xe3, 0xa7, 0xc4, 0x46, 0x66, 0x9f, 0x63, 0x61, 0x7e, + 0x5d, 0x82, 0x66, 0x64, 0xbc, 0x13, 0x4e, 0x48, 0x9b, 0xd0, 0x09, 0x95, 0xc6, 0x70, 0x42, 0xe5, + 0xb1, 0x9d, 0xd0, 0x5b, 0xd0, 0x20, 0x22, 0x14, 0x16, 0xd2, 0x96, 0xda, 0xd8, 0x28, 0xc2, 0xd6, + 0x23, 0xd8, 0xb4, 0xa3, 0xd0, 0xaf, 0x00, 0x62, 0x5f, 0x87, 0x3a, 0xc5, 0x87, 0x72, 0x2a, 0xc3, + 0x3a, 0xea, 0x9b, 0x73, 0x1c, 0xdb, 0xd0, 0x90, 0x53, 0x50, 0xda, 0x98, 0x28, 0x42, 0x2f, 0x15, + 0x45, 0xe8, 0x1b, 0x5f, 0xc1, 0x8a, 0x2a, 0x2d, 0xaa, 0x64, 0xf9, 0x56, 0x9a, 0x65, 0xa1, 0xfa, + 0x0a, 0xf6, 0x04, 0xda, 0x79, 0xe9, 0x3e, 0xe5, 0x10, 0x77, 0xd2, 0x43, 0x9c, 0x53, 0x65, 0x72, + 0x92, 0x72, 0x25, 0x86, 0xe9, 0xc0, 0x5c, 0xea, 0xf6, 0xa3, 0xe4, 0x7d, 0x25, 0xcd, 0x7b, 0x45, + 0x25, 0x0f, 0x92, 0xa1, 0x01, 0x27, 0x72, 0xa2, 0x6d, 0x25, 0xeb, 0xb7, 0xd3, 0xac, 0xcf, 0x2a, + 0x04, 0x55, 0x31, 0x6b, 0x0c, 0x6b, 0x6a, 0x1f, 0xaf, 0x1c, 0xe3, 0x76, 0x7a, 0x8c, 0x33, 0xd9, + 0xf0, 0x45, 0x31, 0x84, 0x3c, 0xde, 0x51, 0x03, 0x35, 0xee, 0xf1, 0x8e, 0x9a, 0x08, 0xc1, 0xfe, + 0x5f, 0x61, 0x3e, 0x2d, 0xac, 0x4a, 0xc6, 0x57, 0xd3, 0x8c, 0x57, 0x95, 0xe1, 0xa0, 0x64, 0xb9, + 0x07, 0x27, 0x45, 0x52, 0x5d, 0x78, 0x8f, 0xc3, 0xa6, 0xfd, 0x4e, 0x9a, 0xfb, 0xba, 0x32, 0x1f, + 0x5b, 0x20, 0x33, 0x32, 0x7b, 0x3f, 0xb6, 0xcc, 0x44, 0x69, 0x7f, 0xc1, 0xf0, 0x39, 0x9c, 0x66, + 0x0c, 0xe3, 0xf4, 0xeb, 0xf0, 0xb0, 0xb9, 0xbf, 0x9b, 0x66, 0xbf, 0xa1, 0xce, 0xe2, 0x0e, 0x15, + 0xb3, 0x97, 0x8a, 0x25, 0x73, 0xd6, 0xc7, 0x51, 0x2c, 0x05, 0x8f, 0x51, 0x11, 0xcd, 0x58, 0xf4, + 0xb1, 0x45, 0x34, 0xeb, 0x13, 0xc4, 0x10, 0x2f, 0xe0, 0x2c, 0x1f, 0x22, 0x95, 0x0d, 0x3e, 0x6c, + 0xa8, 0xf7, 0xd3, 0x43, 0x5d, 0xcc, 0x4f, 0x3a, 0x2b, 0x86, 0xfc, 0x12, 0x96, 0xc5, 0x90, 0x34, + 0x1e, 0xf8, 0x0c, 0xfb, 0x26, 0xde, 0xb5, 0xd4, 0xe3, 0xbc, 0x99, 0x1e, 0xe7, 0x54, 0x36, 0xac, + 0x90, 0xdd, 0x25, 0xf3, 0x7f, 0x82, 0x16, 0x67, 0xce, 0xb2, 0x96, 0x4a, 0xa6, 0x2b, 0x49, 0xa6, + 0x4d, 0xd9, 0xf1, 0xaf, 0x1a, 0x34, 0x3b, 0x78, 0x10, 0xee, 0x7f, 0x6c, 0xb9, 0xaf, 0xd0, 0x55, + 0x58, 0xa2, 0xff, 0xbb, 0xbe, 0xf9, 0x1f, 
0xdc, 0x1b, 0xd1, 0x88, 0x91, 0x33, 0x59, 0x4c, 0x11, + 0x3e, 0xf5, 0x2d, 0x74, 0x1a, 0x9a, 0xa1, 0xfb, 0x9c, 0x70, 0x10, 0x67, 0xda, 0x60, 0x0d, 0x94, + 0x78, 0x0e, 0x5a, 0x3e, 0xe9, 0xfb, 0x24, 0xd8, 0x67, 0x64, 0x1e, 0xd4, 0x83, 0x68, 0xa2, 0x80, + 0xab, 0xd4, 0x97, 0xbb, 0x5e, 0x54, 0x90, 0x50, 0x26, 0x5f, 0x05, 0x64, 0xda, 0x5e, 0xf2, 0xf7, + 0x25, 0x80, 0x68, 0xd1, 0xcc, 0x6d, 0x9b, 0xb6, 0x67, 0x99, 0x86, 0x19, 0x8a, 0xc0, 0x22, 0x65, + 0x2d, 0x22, 0xa4, 0x1e, 0xc1, 0x68, 0x17, 0x0f, 0x07, 0xc1, 0x2b, 0xd7, 0xef, 0xa9, 0x0c, 0x4c, + 0xa2, 0x8b, 0x84, 0xa1, 0xfb, 0x80, 0x0c, 0xcb, 0x24, 0x4e, 0xd8, 0x35, 0x7c, 0xd2, 0x23, 0x4e, + 0x68, 0x62, 0x4b, 0x06, 0x19, 0x39, 0x9d, 0x97, 0x78, 0x87, 0x7b, 0x31, 0x9e, 0x72, 0x49, 0x9f, + 0x90, 0x21, 0x73, 0x88, 0xf9, 0x5c, 0x52, 0x1d, 0xee, 0xd1, 0x4b, 0xff, 0x94, 0xf7, 0xf3, 0x09, + 0xd4, 0x3a, 0xbb, 0x5f, 0x13, 0x23, 0x9c, 0x66, 0xc6, 0xe7, 0xfb, 0x2a, 0x34, 0x3b, 0xf2, 0x36, + 0x40, 0x25, 0x9a, 0x25, 0x99, 0x29, 0x9f, 0xa6, 0xc8, 0x29, 0x27, 0x72, 0x89, 0xa5, 0xc2, 0x5c, + 0x62, 0x39, 0x1b, 0xba, 0x67, 0xf2, 0xd1, 0x95, 0xb1, 0xf2, 0xd1, 0xa3, 0xb7, 0x97, 0x6a, 0xf6, + 0xf6, 0xf2, 0x2f, 0xa9, 0xdb, 0x4b, 0x2d, 0x1b, 0x06, 0x2b, 0xfd, 0x44, 0xf2, 0x02, 0xb3, 0x3d, + 0x72, 0x81, 0xa9, 0x1f, 0xd9, 0x5e, 0xa7, 0xee, 0x31, 0xb7, 0x93, 0x15, 0xc8, 0x86, 0x2a, 0xde, + 0x17, 0xc4, 0x64, 0xd5, 0xf1, 0xd8, 0x15, 0xae, 0x74, 0x4e, 0x0b, 0x32, 0x39, 0xad, 0x64, 0xc9, + 0xa1, 0x35, 0x6e, 0xc9, 0x21, 0x51, 0x3e, 0x99, 0x3d, 0xbc, 0x7c, 0x52, 0x20, 0xed, 0x73, 0x13, + 0x48, 0xfb, 0x6f, 0x2a, 0xd0, 0x8c, 0xce, 0x4e, 0x69, 0x6a, 0xe7, 0xa1, 0x64, 0xca, 0x7c, 0x47, + 0xc9, 0x74, 0x8e, 0x20, 0x8e, 0xc9, 0xec, 0x60, 0xa5, 0x30, 0x3b, 0x58, 0x3d, 0x5a, 0x76, 0xb0, + 0x76, 0x48, 0x76, 0xb0, 0x9e, 0x93, 0x1d, 0x6c, 0x1c, 0x96, 0x1d, 0x6c, 0x16, 0x67, 0x07, 0x61, + 0xc2, 0x8b, 0x59, 0x6b, 0x8c, 0x8b, 0xd9, 0xec, 0x71, 0xb2, 0x83, 0x73, 0xc7, 0xce, 0x0e, 0xce, + 0x4f, 0x20, 0x33, 0xff, 0xa3, 0xc1, 0x8a, 0x32, 0xb8, 0xbc, 0x03, 0xcd, 0x48, 0xe3, 0x55, 0xce, + 0x27, 0xea, 0x44, 0x2f, 0xe3, 0x11, 0xf2, 0xd8, 0x77, 0xf8, 0x10, 0xd6, 0xd4, 0xcf, 0x06, 0xd0, + 0x17, 0xc5, 0x16, 0xfc, 0xa2, 0xe2, 0x21, 0x8a, 0xc2, 0x8c, 0xa9, 0x4d, 0xfa, 0xdf, 0x2a, 0xd0, + 0x88, 0x22, 0xdf, 0x25, 0xa8, 0x24, 0x92, 0x43, 0x65, 0x9d, 0xf4, 0x8f, 0x65, 0xd0, 0x2f, 0x41, + 0x79, 0x8f, 0x84, 0x4a, 0x4f, 0x18, 0xa5, 0x9d, 0x28, 0x82, 0x02, 0xbd, 0x41, 0x28, 0xd2, 0x82, + 0x79, 0x40, 0x6f, 0x10, 0xa2, 0x37, 0xa0, 0xe2, 0xb9, 0x41, 0x28, 0x72, 0x3b, 0x39, 0x48, 0x06, + 0x41, 0xd7, 0xa1, 0xd6, 0x23, 0x16, 0x09, 0x89, 0xb0, 0xd1, 0x39, 0x60, 0x01, 0x42, 0x37, 0xa1, + 0xee, 0xb2, 0x59, 0x2b, 0xed, 0x71, 0x8c, 0x97, 0x28, 0x3a, 0x95, 0x7d, 0x82, 0x7b, 0xc2, 0x10, + 0xe7, 0x4d, 0x85, 0x42, 0xe8, 0xbd, 0xc7, 0xc3, 0xa1, 0xb1, 0x2f, 0x34, 0x31, 0x07, 0xcb, 0x31, + 0x14, 0x1c, 0xfa, 0xd8, 0x90, 0xea, 0x97, 0x07, 0x66, 0x98, 0x31, 0x8d, 0x6e, 0xda, 0xfd, 0xcd, + 0x4d, 0xe0, 0xfe, 0xa6, 0xac, 0x82, 0xdf, 0x6a, 0x50, 0x65, 0xc5, 0xf2, 0x7f, 0xf0, 0xd3, 0x2a, + 0x02, 0x10, 0xab, 0xc4, 0xf8, 0xb9, 0xf9, 0xac, 0x35, 0x55, 0xeb, 0xda, 0x59, 0x68, 0xc6, 0x56, + 0x26, 0xab, 0x6b, 0x1b, 0x43, 0x38, 0x99, 0xfb, 0x64, 0x07, 0xfd, 0x7b, 0xf1, 0xac, 0x2e, 0x65, + 0x66, 0x95, 0x13, 0x87, 0xa8, 0xa7, 0xf6, 0x3b, 0x0d, 0x5a, 0x7a, 0x7e, 0xa2, 0x55, 0x51, 0x3c, + 0x4b, 0xd8, 0xef, 0xd2, 0xd1, 0xec, 0x77, 0xd2, 0xa1, 0x96, 0x47, 0x1c, 0xea, 0x94, 0x5f, 0xb9, + 0x7c, 0xaf, 0xc1, 0x5a, 0xce, 0xf5, 0xfb, 0x83, 0x91, 0x08, 0x8e, 0x1b, 0xf8, 0x13, 0x39, 0x11, + 0xdc, 0xce, 0x4c, 0x3a, 0x70, 0x3b, 0xae, 0x91, 0xff, 0x65, 0x09, 
0x1a, 0x32, 0xb8, 0x3b, 0xc2, + 0x26, 0x1f, 0xe3, 0x91, 0x43, 0xe2, 0x7c, 0xca, 0x47, 0x3b, 0x9f, 0xe8, 0xd9, 0x55, 0x65, 0x8c, + 0x67, 0x57, 0x53, 0xbe, 0xb6, 0x7c, 0xa7, 0xc1, 0xb2, 0x2a, 0x95, 0x71, 0x8b, 0xca, 0x0e, 0x6f, + 0x56, 0x3d, 0x72, 0x94, 0x5d, 0x76, 0x66, 0xf4, 0x08, 0x77, 0xec, 0xb3, 0xfa, 0x8b, 0x46, 0xf5, + 0x55, 0x46, 0xdf, 0xef, 0x41, 0xbd, 0xc7, 0x1f, 0x4e, 0xa9, 0xde, 0x1b, 0xaa, 0x52, 0x27, 0x12, + 0x8f, 0x9e, 0xc1, 0xaa, 0x9c, 0x54, 0xba, 0xac, 0x54, 0xca, 0x79, 0x14, 0xa9, 0xe2, 0xb6, 0xec, + 0x2b, 0xb6, 0x65, 0xca, 0xe5, 0x22, 0x1f, 0x56, 0x95, 0x6f, 0x1f, 0xc7, 0x7f, 0xd6, 0xa9, 0x5a, + 0x41, 0x4e, 0xc1, 0xb2, 0x05, 0x35, 0x6e, 0x40, 0xa9, 0x81, 0x70, 0x06, 0x96, 0x85, 0x77, 0x45, + 0xd5, 0xb2, 0xa1, 0x47, 0xdf, 0xe8, 0x23, 0x98, 0xeb, 0x25, 0xdf, 0x93, 0x89, 0x03, 0x4d, 0x79, + 0x8a, 0xd4, 0x83, 0x33, 0x3d, 0x8d, 0x47, 0xa7, 0xa9, 0x34, 0xe0, 0x5e, 0xd7, 0x75, 0xac, 0x61, + 0x6c, 0x7e, 0x70, 0xaf, 0xe3, 0x58, 0x43, 0x74, 0x06, 0xe0, 0x95, 0x6f, 0x86, 0x84, 0x53, 0x79, + 0xb4, 0xdf, 0x64, 0x2d, 0x8c, 0x7c, 0x1e, 0xca, 0x07, 0xb6, 0x25, 0xe2, 0x93, 0x54, 0x40, 0xfc, + 0xcc, 0xb6, 0x74, 0x4a, 0xcb, 0x5e, 0x5e, 0x6b, 0x63, 0x5d, 0x5e, 0x13, 0x61, 0x77, 0xfd, 0x90, + 0xb0, 0x3b, 0x7d, 0xf7, 0x68, 0x64, 0xee, 0x1e, 0x51, 0xe1, 0xb7, 0x99, 0x2c, 0xfc, 0x9e, 0x83, + 0x96, 0x3d, 0xb0, 0x42, 0xd3, 0xb3, 0x48, 0xd7, 0xed, 0xb3, 0x48, 0x44, 0xd3, 0x41, 0x36, 0x75, + 0x58, 0xa0, 0x67, 0xe3, 0x03, 0xd3, 0x1e, 0xd8, 0x2c, 0xf2, 0xd0, 0x74, 0xf9, 0x89, 0xae, 0xc2, + 0x12, 0x39, 0x30, 0xac, 0x41, 0x60, 0xbe, 0x24, 0x5d, 0x89, 0x99, 0x65, 0xe3, 0x2e, 0x46, 0x84, + 0x47, 0x02, 0x4c, 0xd9, 0x98, 0x0e, 0x83, 0xcc, 0x09, 0x36, 0xfc, 0x73, 0x84, 0x8d, 0xc0, 0xcc, + 0x8f, 0xb2, 0x11, 0xe0, 0x33, 0x00, 0x36, 0x3e, 0xe8, 0x5a, 0xc4, 0xd9, 0x0b, 0xf7, 0xdb, 0x0b, + 0xeb, 0xda, 0xe5, 0xb2, 0xde, 0xb4, 0xf1, 0xc1, 0x43, 0xd6, 0xc0, 0xc8, 0xa6, 0x23, 0xc9, 0x8b, + 0x82, 0x6c, 0x3a, 0x82, 0xdc, 0x86, 0xba, 0x87, 0x43, 0xba, 0xbd, 0xed, 0x25, 0x1e, 0xb4, 0x8a, + 0x4f, 0x2a, 0x05, 0x94, 0xaf, 0x19, 0x12, 0x3b, 0x68, 0x23, 0xd6, 0xaf, 0x61, 0xe3, 0x03, 0x56, + 0x00, 0x66, 0x44, 0xd3, 0x11, 0xc4, 0x65, 0x41, 0x34, 0x1d, 0x4e, 0x3c, 0x0f, 0xb3, 0x03, 0xc7, + 0x7c, 0x31, 0x20, 0x82, 0xbe, 0xc2, 0x66, 0xde, 0xe2, 0x6d, 0x1c, 0x72, 0x11, 0xe6, 0x29, 0xf3, + 0x84, 0x7a, 0xac, 0x32, 0x26, 0x73, 0x36, 0x3e, 0x48, 0xc4, 0x17, 0x14, 0x66, 0x3a, 0x49, 0xd8, + 0x9a, 0x80, 0x99, 0x4e, 0x02, 0x96, 0x74, 0x97, 0x27, 0x58, 0x8a, 0x25, 0x76, 0x97, 0x9b, 0x50, + 0x21, 0xce, 0xc0, 0x6e, 0xb7, 0xb3, 0xef, 0xfb, 0xa8, 0xac, 0x30, 0x22, 0xcb, 0xcf, 0x0c, 0x3d, + 0xd2, 0x3e, 0xc9, 0xaf, 0xc1, 0xf4, 0x7f, 0xf4, 0x36, 0xd4, 0xb0, 0x65, 0x51, 0x09, 0x38, 0x75, + 0x94, 0x92, 0x76, 0x15, 0x5b, 0x56, 0xa7, 0x4f, 0x7b, 0xb9, 0x0e, 0x93, 0x9b, 0xd3, 0x47, 0xea, + 0xe5, 0x3a, 0x84, 0xf7, 0xc2, 0xce, 0x90, 0xf6, 0x7a, 0xed, 0x68, 0x63, 0x39, 0xc3, 0x4e, 0x1f, + 0x5d, 0x80, 0xb2, 0xe3, 0x86, 0xed, 0x33, 0x8a, 0xea, 0x2d, 0xeb, 0xa2, 0x53, 0x32, 0x8d, 0x92, + 0xf9, 0x31, 0x9c, 0xcd, 0xda, 0xf5, 0xa8, 0x9a, 0xaf, 0x73, 0x0c, 0x7a, 0x07, 0x20, 0xb1, 0xd9, + 0xe7, 0xb2, 0xde, 0x30, 0xde, 0x75, 0x3d, 0x81, 0xcc, 0xb7, 0x7a, 0xeb, 0x8c, 0x45, 0xca, 0xea, + 0xe5, 0xfd, 0xca, 0x43, 0x6d, 0xf5, 0xd0, 0x5b, 0xb1, 0x37, 0x39, 0x9f, 0x0d, 0x40, 0x12, 0x2f, + 0x74, 0x63, 0x2f, 0x32, 0x12, 0x2d, 0x6c, 0x64, 0xa3, 0x85, 0x35, 0xa8, 0xf5, 0x5d, 0xdf, 0xc6, + 0x61, 0x7b, 0x93, 0x11, 0xc5, 0x57, 0x91, 0x97, 0xb8, 0x30, 0xe1, 0xb3, 0x9a, 0x6c, 0x1d, 0xe0, + 0xda, 0x48, 0x21, 0x58, 0x71, 0x74, 0xec, 0x9d, 0x30, 0x37, 0xf5, 0x53, 0x78, 0x56, 0xa3, 
0x78, + 0xe2, 0xff, 0xf3, 0x85, 0xee, 0xab, 0xb0, 0xac, 0x48, 0x7f, 0x6d, 0xfc, 0xa1, 0x04, 0xf3, 0xe9, + 0x2a, 0x43, 0xa4, 0x75, 0x5a, 0x42, 0xeb, 0x0e, 0x7f, 0x75, 0x23, 0x53, 0x56, 0xe5, 0x4c, 0xca, + 0xaa, 0x12, 0xa5, 0xac, 0xd6, 0xc4, 0x0e, 0xcb, 0x77, 0x64, 0xe2, 0x0b, 0x6d, 0xc2, 0xdc, 0x2e, + 0xc1, 0x3e, 0xf1, 0xbb, 0xe2, 0xf4, 0xf9, 0x7b, 0x99, 0x59, 0xde, 0xf8, 0x31, 0x97, 0x81, 0x6b, + 0x50, 0xed, 0x5b, 0xee, 0x2b, 0xf9, 0x38, 0x7f, 0x4d, 0x99, 0x97, 0x0e, 0x74, 0x0e, 0x42, 0xd7, + 0x61, 0x99, 0xd2, 0xbb, 0x66, 0xaf, 0x6b, 0xb8, 0x8e, 0x43, 0x8c, 0x90, 0x95, 0x0c, 0x1a, 0xbc, + 0xec, 0x40, 0x49, 0x0f, 0x7a, 0xf7, 0x38, 0xe1, 0xd3, 0xe2, 0x57, 0x2b, 0x93, 0x3c, 0x05, 0xfe, + 0x41, 0x83, 0x93, 0xf9, 0x55, 0xa0, 0x6d, 0x58, 0x18, 0xf9, 0xc5, 0x81, 0x90, 0xb8, 0x53, 0xf9, + 0xb5, 0x9f, 0x9d, 0x19, 0x7d, 0x3e, 0xfd, 0x2b, 0x83, 0x63, 0x4b, 0xe0, 0x37, 0x70, 0xba, 0xe0, + 0x47, 0x0d, 0xa8, 0x5b, 0x2c, 0x89, 0x57, 0xb2, 0x92, 0x98, 0x5b, 0xac, 0x52, 0x4b, 0xe4, 0x6f, + 0x35, 0xa8, 0xf1, 0x0b, 0xbe, 0x7c, 0x46, 0xa4, 0xc5, 0xcf, 0x88, 0x0e, 0x17, 0xb8, 0xf7, 0xa0, + 0xf9, 0x52, 0x14, 0xac, 0x64, 0xc5, 0xe3, 0x74, 0x7e, 0x4d, 0x2b, 0xd0, 0x63, 0xf4, 0xb4, 0xef, + 0x6a, 0x3f, 0x6a, 0x54, 0x87, 0x46, 0x0b, 0x70, 0xcc, 0xbd, 0x89, 0xca, 0x02, 0xf3, 0x66, 0xed, + 0xd8, 0x62, 0x8a, 0x44, 0x54, 0x8e, 0x61, 0x54, 0x24, 0xa2, 0xa6, 0x3c, 0xe3, 0x3d, 0x58, 0x18, + 0xd9, 0x1e, 0xf4, 0xb4, 0xf8, 0xb8, 0xcf, 0x29, 0x8e, 0x3b, 0x55, 0x33, 0x54, 0x9f, 0xf1, 0x0b, + 0x58, 0x7b, 0xa2, 0x9c, 0xc2, 0xcf, 0xf7, 0xbb, 0x8d, 0x4d, 0x68, 0xf1, 0x4a, 0xdf, 0x96, 0xef, + 0xe3, 0x61, 0x5c, 0xa1, 0xe4, 0x47, 0x21, 0x2a, 0x94, 0x9f, 0x43, 0x5d, 0x94, 0x03, 0xd1, 0xc3, + 0xe2, 0x85, 0x9f, 0xc8, 0x2e, 0x9c, 0x75, 0xcc, 0x59, 0xf0, 0x4f, 0x1a, 0x94, 0x9f, 0x62, 0x75, + 0xb1, 0xf4, 0x70, 0x99, 0xce, 0xc4, 0xe0, 0xe5, 0x69, 0xfd, 0xa0, 0x61, 0x12, 0x29, 0xf9, 0xa3, + 0x06, 0xe5, 0x67, 0xb6, 0xa5, 0x5c, 0xcb, 0x6b, 0xd0, 0xa4, 0x7f, 0x03, 0x0f, 0x1b, 0xb2, 0xf8, + 0x1b, 0x37, 0x50, 0x43, 0xef, 0xf9, 0xa4, 0x6f, 0x1e, 0x08, 0x59, 0x16, 0x5f, 0xb4, 0x17, 0x0e, + 0x43, 0xdf, 0xdc, 0x1d, 0x84, 0xf2, 0xad, 0x71, 0xdc, 0x40, 0x15, 0xe4, 0x95, 0x8f, 0x3d, 0x2f, + 0x2a, 0x48, 0xc8, 0xcf, 0x29, 0xff, 0x30, 0xe1, 0xee, 0xeb, 0x30, 0xef, 0xfa, 0x7b, 0xb2, 0x4b, + 0xf7, 0xe5, 0xed, 0xbb, 0xb3, 0xe2, 0xe7, 0xb1, 0x8f, 0x7d, 0x37, 0x74, 0x1f, 0x6b, 0x3f, 0x94, + 0xca, 0x9d, 0xad, 0x27, 0xbb, 0x35, 0xf6, 0x5b, 0xd2, 0xdb, 0x7f, 0x0f, 0x00, 0x00, 0xff, 0xff, + 0x29, 0xfd, 0x14, 0x6b, 0x47, 0x3b, 0x00, 0x00, +} diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.proto b/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.proto new file mode 100644 index 000000000..f9a75c52c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.proto @@ -0,0 +1,671 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. 
+ +syntax = "proto3"; + +package openapi.v3; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIProto"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapi_v3"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +message AdditionalPropertiesItem { + oneof oneof { + SchemaOrReference schema_or_reference = 1; + bool boolean = 2; + } +} + +message Any { + google.protobuf.Any value = 1; + string yaml = 2; +} + +message AnyOrExpression { + oneof oneof { + Any any = 1; + Expression expression = 2; + } +} + +message AnysOrExpressions { + repeated NamedAnyOrExpression additional_properties = 1; +} + +// A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation. +message Callback { + repeated NamedPathItem path = 1; + repeated NamedAny specification_extension = 2; +} + +message CallbackOrReference { + oneof oneof { + Callback callback = 1; + Reference reference = 2; + } +} + +message CallbacksOrReferences { + repeated NamedCallbackOrReference additional_properties = 1; +} + +// Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object. +message Components { + SchemasOrReferences schemas = 1; + ResponsesOrReferences responses = 2; + ParametersOrReferences parameters = 3; + ExamplesOrReferences examples = 4; + RequestBodiesOrReferences request_bodies = 5; + HeadersOrReferences headers = 6; + SecuritySchemesOrReferences security_schemes = 7; + LinksOrReferences links = 8; + CallbacksOrReferences callbacks = 9; + repeated NamedAny specification_extension = 10; +} + +// Contact information for the exposed API. +message Contact { + string name = 1; + string url = 2; + string email = 3; + repeated NamedAny specification_extension = 4; +} + +message DefaultType { + oneof oneof { + double number = 1; + bool boolean = 2; + string string = 3; + } +} + +// When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. 
The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered. +message Discriminator { + string property_name = 1; + Strings mapping = 2; +} + +message Document { + string openapi = 1; + Info info = 2; + repeated Server servers = 3; + Paths paths = 4; + Components components = 5; + repeated SecurityRequirement security = 6; + repeated Tag tags = 7; + ExternalDocs external_docs = 8; + repeated NamedAny specification_extension = 9; +} + +// A single encoding definition applied to a single schema property. +message Encoding { + string content_type = 1; + HeadersOrReferences headers = 2; + string style = 3; + bool explode = 4; + bool allow_reserved = 5; + repeated NamedAny specification_extension = 6; +} + +message Encodings { + repeated NamedEncoding additional_properties = 1; +} + +message Example { + string summary = 1; + string description = 2; + Any value = 3; + string external_value = 4; + repeated NamedAny specification_extension = 5; +} + +message ExampleOrReference { + oneof oneof { + Example example = 1; + Reference reference = 2; + } +} + +message Examples { +} + +message ExamplesOrReferences { + repeated NamedExampleOrReference additional_properties = 1; +} + +message Expression { + repeated NamedAny additional_properties = 1; +} + +// Allows referencing an external resource for extended documentation. +message ExternalDocs { + string description = 1; + string url = 2; + repeated NamedAny specification_extension = 3; +} + +// The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`). +message Header { + string description = 1; + bool required = 2; + bool deprecated = 3; + bool allow_empty_value = 4; + string style = 5; + bool explode = 6; + bool allow_reserved = 7; + SchemaOrReference schema = 8; + Any example = 9; + ExamplesOrReferences examples = 10; + MediaTypes content = 11; + repeated NamedAny specification_extension = 12; +} + +message HeaderOrReference { + oneof oneof { + Header header = 1; + Reference reference = 2; + } +} + +message HeadersOrReferences { + repeated NamedHeaderOrReference additional_properties = 1; +} + +// The object provides metadata about the API. The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience. +message Info { + string title = 1; + string description = 2; + string terms_of_service = 3; + Contact contact = 4; + License license = 5; + string version = 6; + repeated NamedAny specification_extension = 7; +} + +message ItemsItem { + repeated SchemaOrReference schema_or_reference = 1; +} + +// License information for the exposed API. +message License { + string name = 1; + string url = 2; + repeated NamedAny specification_extension = 3; +} + +// The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. 
links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation. +message Link { + string operation_ref = 1; + string operation_id = 2; + AnysOrExpressions parameters = 3; + AnyOrExpression request_body = 4; + string description = 5; + Server server = 6; + repeated NamedAny specification_extension = 7; +} + +message LinkOrReference { + oneof oneof { + Link link = 1; + Reference reference = 2; + } +} + +message LinksOrReferences { + repeated NamedLinkOrReference additional_properties = 1; +} + +// Each Media Type Object provides schema and examples for the media type identified by its key. +message MediaType { + SchemaOrReference schema = 1; + Any example = 2; + ExamplesOrReferences examples = 3; + Encodings encoding = 4; + repeated NamedAny specification_extension = 5; +} + +message MediaTypes { + repeated NamedMediaType additional_properties = 1; +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +message NamedAny { + // Map key + string name = 1; + // Mapped value + Any value = 2; +} + +// Automatically-generated message used to represent maps of AnyOrExpression as ordered (name,value) pairs. +message NamedAnyOrExpression { + // Map key + string name = 1; + // Mapped value + AnyOrExpression value = 2; +} + +// Automatically-generated message used to represent maps of CallbackOrReference as ordered (name,value) pairs. +message NamedCallbackOrReference { + // Map key + string name = 1; + // Mapped value + CallbackOrReference value = 2; +} + +// Automatically-generated message used to represent maps of Encoding as ordered (name,value) pairs. +message NamedEncoding { + // Map key + string name = 1; + // Mapped value + Encoding value = 2; +} + +// Automatically-generated message used to represent maps of ExampleOrReference as ordered (name,value) pairs. +message NamedExampleOrReference { + // Map key + string name = 1; + // Mapped value + ExampleOrReference value = 2; +} + +// Automatically-generated message used to represent maps of HeaderOrReference as ordered (name,value) pairs. +message NamedHeaderOrReference { + // Map key + string name = 1; + // Mapped value + HeaderOrReference value = 2; +} + +// Automatically-generated message used to represent maps of LinkOrReference as ordered (name,value) pairs. +message NamedLinkOrReference { + // Map key + string name = 1; + // Mapped value + LinkOrReference value = 2; +} + +// Automatically-generated message used to represent maps of MediaType as ordered (name,value) pairs. +message NamedMediaType { + // Map key + string name = 1; + // Mapped value + MediaType value = 2; +} + +// Automatically-generated message used to represent maps of ParameterOrReference as ordered (name,value) pairs. +message NamedParameterOrReference { + // Map key + string name = 1; + // Mapped value + ParameterOrReference value = 2; +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +message NamedPathItem { + // Map key + string name = 1; + // Mapped value + PathItem value = 2; +} + +// Automatically-generated message used to represent maps of RequestBodyOrReference as ordered (name,value) pairs. 
+message NamedRequestBodyOrReference { + // Map key + string name = 1; + // Mapped value + RequestBodyOrReference value = 2; +} + +// Automatically-generated message used to represent maps of ResponseOrReference as ordered (name,value) pairs. +message NamedResponseOrReference { + // Map key + string name = 1; + // Mapped value + ResponseOrReference value = 2; +} + +// Automatically-generated message used to represent maps of SchemaOrReference as ordered (name,value) pairs. +message NamedSchemaOrReference { + // Map key + string name = 1; + // Mapped value + SchemaOrReference value = 2; +} + +// Automatically-generated message used to represent maps of SecuritySchemeOrReference as ordered (name,value) pairs. +message NamedSecuritySchemeOrReference { + // Map key + string name = 1; + // Mapped value + SecuritySchemeOrReference value = 2; +} + +// Automatically-generated message used to represent maps of ServerVariable as ordered (name,value) pairs. +message NamedServerVariable { + // Map key + string name = 1; + // Mapped value + ServerVariable value = 2; +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +message NamedString { + // Map key + string name = 1; + // Mapped value + string value = 2; +} + +// Configuration details for a supported OAuth Flow +message OauthFlow { + string authorization_url = 1; + string token_url = 2; + string refresh_url = 3; + Strings scopes = 4; + repeated NamedAny specification_extension = 5; +} + +// Allows configuration of the supported OAuth Flows. +message OauthFlows { + OauthFlow implicit = 1; + OauthFlow password = 2; + OauthFlow client_credentials = 3; + OauthFlow authorization_code = 4; + repeated NamedAny specification_extension = 5; +} + +message Object { + repeated NamedAny additional_properties = 1; +} + +// Describes a single API operation on a path. +message Operation { + repeated string tags = 1; + string summary = 2; + string description = 3; + ExternalDocs external_docs = 4; + string operation_id = 5; + repeated ParameterOrReference parameters = 6; + RequestBodyOrReference request_body = 7; + Responses responses = 8; + CallbacksOrReferences callbacks = 9; + bool deprecated = 10; + repeated SecurityRequirement security = 11; + repeated Server servers = 12; + repeated NamedAny specification_extension = 13; +} + +// Describes a single operation parameter. A unique parameter is defined by a combination of a name and location. +message Parameter { + string name = 1; + string in = 2; + string description = 3; + bool required = 4; + bool deprecated = 5; + bool allow_empty_value = 6; + string style = 7; + bool explode = 8; + bool allow_reserved = 9; + SchemaOrReference schema = 10; + Any example = 11; + ExamplesOrReferences examples = 12; + MediaTypes content = 13; + repeated NamedAny specification_extension = 14; +} + +message ParameterOrReference { + oneof oneof { + Parameter parameter = 1; + Reference reference = 2; + } +} + +message ParametersOrReferences { + repeated NamedParameterOrReference additional_properties = 1; +} + +// Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available. 
+message PathItem { + string _ref = 1; + string summary = 2; + string description = 3; + Operation get = 4; + Operation put = 5; + Operation post = 6; + Operation delete = 7; + Operation options = 8; + Operation head = 9; + Operation patch = 10; + Operation trace = 11; + repeated Server servers = 12; + repeated ParameterOrReference parameters = 13; + repeated NamedAny specification_extension = 14; +} + +// Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints. +message Paths { + repeated NamedPathItem path = 1; + repeated NamedAny specification_extension = 2; +} + +message Properties { + repeated NamedSchemaOrReference additional_properties = 1; +} + +// A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification. +message Reference { + string _ref = 1; +} + +message RequestBodiesOrReferences { + repeated NamedRequestBodyOrReference additional_properties = 1; +} + +// Describes a single request body. +message RequestBody { + string description = 1; + MediaTypes content = 2; + bool required = 3; + repeated NamedAny specification_extension = 4; +} + +message RequestBodyOrReference { + oneof oneof { + RequestBody request_body = 1; + Reference reference = 2; + } +} + +// Describes a single response from an API Operation, including design-time, static `links` to operations based on the response. +message Response { + string description = 1; + HeadersOrReferences headers = 2; + MediaTypes content = 3; + LinksOrReferences links = 4; + repeated NamedAny specification_extension = 5; +} + +message ResponseOrReference { + oneof oneof { + Response response = 1; + Reference reference = 2; + } +} + +// A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call. +message Responses { + ResponseOrReference default = 1; + repeated NamedResponseOrReference response_or_reference = 2; + repeated NamedAny specification_extension = 3; +} + +message ResponsesOrReferences { + repeated NamedResponseOrReference additional_properties = 1; +} + +// The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. Unless stated otherwise, the property definitions follow the JSON Schema. 
+message Schema { + bool nullable = 1; + Discriminator discriminator = 2; + bool read_only = 3; + bool write_only = 4; + Xml xml = 5; + ExternalDocs external_docs = 6; + Any example = 7; + bool deprecated = 8; + string title = 9; + double multiple_of = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + int64 max_properties = 21; + int64 min_properties = 22; + repeated string required = 23; + repeated Any enum = 24; + string type = 25; + repeated SchemaOrReference all_of = 26; + repeated SchemaOrReference one_of = 27; + repeated SchemaOrReference any_of = 28; + Schema not = 29; + ItemsItem items = 30; + Properties properties = 31; + AdditionalPropertiesItem additional_properties = 32; + DefaultType default = 33; + string description = 34; + string format = 35; + repeated NamedAny specification_extension = 36; +} + +message SchemaOrReference { + oneof oneof { + Schema schema = 1; + Reference reference = 2; + } +} + +message SchemasOrReferences { + repeated NamedSchemaOrReference additional_properties = 1; +} + +// Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. When a list of Security Requirement Objects is defined on the Open API object or Operation Object, only one of Security Requirement Objects in the list needs to be satisfied to authorize the request. +message SecurityRequirement { +} + +// Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect Discovery. +message SecurityScheme { + string type = 1; + string description = 2; + string name = 3; + string in = 4; + string scheme = 5; + string bearer_format = 6; + OauthFlows flows = 7; + string open_id_connect_url = 8; + repeated NamedAny specification_extension = 9; +} + +message SecuritySchemeOrReference { + oneof oneof { + SecurityScheme security_scheme = 1; + Reference reference = 2; + } +} + +message SecuritySchemesOrReferences { + repeated NamedSecuritySchemeOrReference additional_properties = 1; +} + +// An object representing a Server. +message Server { + string url = 1; + string description = 2; + ServerVariables variables = 3; + repeated NamedAny specification_extension = 4; +} + +// An object representing a Server Variable for server URL template substitution. +message ServerVariable { + repeated string enum = 1; + string default = 2; + string description = 3; + repeated NamedAny specification_extension = 4; +} + +message ServerVariables { + repeated NamedServerVariable additional_properties = 1; +} + +// Any property starting with x- is valid. 
+message SpecificationExtension {
+  oneof oneof {
+    double number = 1;
+    bool boolean = 2;
+    string string = 3;
+  }
+}
+
+message StringArray {
+  repeated string value = 1;
+}
+
+message Strings {
+  repeated NamedString additional_properties = 1;
+}
+
+// Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances.
+message Tag {
+  string name = 1;
+  string description = 2;
+  ExternalDocs external_docs = 3;
+  repeated NamedAny specification_extension = 4;
+}
+
+// A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior.
+message Xml {
+  string name = 1;
+  string namespace = 2;
+  string prefix = 3;
+  bool attribute = 4;
+  bool wrapped = 5;
+  repeated NamedAny specification_extension = 6;
+}
+
diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/README.md b/vendor/github.com/googleapis/gnostic/OpenAPIv3/README.md
new file mode 100644
index 000000000..987822e8b
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/README.md
@@ -0,0 +1,25 @@
+# OpenAPI v3 Protocol Buffer Models
+
+This directory contains a Protocol Buffer-language model
+and related code for supporting OpenAPI v3.
+
+Gnostic applications and plugins can use OpenAPIv3.proto
+to generate Protocol Buffer support code for their preferred
+languages.
+
+OpenAPIv3.go is used by Gnostic to read JSON and YAML OpenAPI
+descriptions into the Protocol Buffer-based datastructures
+generated from OpenAPIv3.proto.
+
+OpenAPIv3.proto and OpenAPIv3.go are generated by the Gnostic
+compiler generator, and OpenAPIv3.pb.go is generated by
+protoc, the Protocol Buffer compiler, and protoc-gen-go, the
+Protocol Buffer Go code generation plugin.
+
+openapi-3.0.json is a preliminary draft JSON schema for OpenAPI 3.0.
+It is not the official OpenAPI 3.0 JSON Schema, which at the time
+of this commit, does not exist.
+
+The schema-generator directory contains support code which
+generates openapi-3.0.json from a draft of the OpenAPI 3.0
+specification document (Markdown).
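As a concrete illustration of how the generated code is consumed, the following minimal sketch (not part of the vendored files) builds a `Server` message using the Go structs that protoc-gen-go derives from `OpenAPIv3.proto`. The `openapi_v3` package name and the import path are assumptions inferred from this vendored directory; the field names follow the standard protoc-gen-go mapping of the proto fields shown above.

```go
package main

import (
	"fmt"

	// Assumed import path/package for the generated OpenAPIv3 code in this vendor tree.
	openapi_v3 "github.com/googleapis/gnostic/OpenAPIv3"
)

func main() {
	// Construct a Server with one templated URL variable, mirroring the
	// Server, ServerVariables, NamedServerVariable, and ServerVariable
	// messages declared in OpenAPIv3.proto.
	server := &openapi_v3.Server{
		Url:         "https://{username}.example.com/v1",
		Description: "example server with a templated URL",
		Variables: &openapi_v3.ServerVariables{
			AdditionalProperties: []*openapi_v3.NamedServerVariable{
				{
					Name: "username",
					Value: &openapi_v3.ServerVariable{
						Default:     "demo",
						Description: "account name substituted into the host",
					},
				},
			},
		},
	}
	fmt.Println(server.Url, server.Variables.AdditionalProperties[0].Name)
}
```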
diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/openapi-3.0.json b/vendor/github.com/googleapis/gnostic/OpenAPIv3/openapi-3.0.json new file mode 100644 index 000000000..c15fc2e5b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/openapi-3.0.json @@ -0,0 +1,1248 @@ +{ + "title": "A JSON Schema for OpenAPI 3.0.", + "id": "http://openapis.org/v3/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "description": "This is the root document object of the OpenAPI document.", + "required": [ + "openapi", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "openapi": { + "type": "string" + }, + "info": { + "$ref": "#/definitions/info" + }, + "servers": { + "type": "array", + "items": { + "$ref": "#/definitions/server" + }, + "uniqueItems": true + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "components": { + "$ref": "#/definitions/components" + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "The object provides metadata about the API. The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience.", + "required": [ + "title", + "version" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "termsOfService": { + "type": "string" + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + }, + "version": { + "type": "string" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the exposed API.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + }, + "email": { + "type": "string" + } + } + }, + "license": { + "type": "object", + "description": "License information for the exposed API.", + "required": [ + "name" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + } + }, + "server": { + "type": "object", + "description": "An object representing a Server.", + "required": [ + "url" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "url": { + "type": "string" + }, + "description": { + "type": "string" + }, + "variables": { + "$ref": "#/definitions/serverVariables" + } + } + }, + "serverVariable": { + "type": "object", + "description": "An object representing a Server Variable for server URL template substitution.", + "required": [ + "default" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "enum": { + "type": "array", + "items": { + 
"type": "string" + }, + "uniqueItems": true + }, + "default": { + "type": "string" + }, + "description": { + "type": "string" + } + } + }, + "components": { + "type": "object", + "description": "Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "schemas": { + "$ref": "#/definitions/schemasOrReferences" + }, + "responses": { + "$ref": "#/definitions/responsesOrReferences" + }, + "parameters": { + "$ref": "#/definitions/parametersOrReferences" + }, + "examples": { + "$ref": "#/definitions/examplesOrReferences" + }, + "requestBodies": { + "$ref": "#/definitions/requestBodiesOrReferences" + }, + "headers": { + "$ref": "#/definitions/headersOrReferences" + }, + "securitySchemes": { + "$ref": "#/definitions/securitySchemesOrReferences" + }, + "links": { + "$ref": "#/definitions/linksOrReferences" + }, + "callbacks": { + "$ref": "#/definitions/callbacksOrReferences" + } + } + }, + "paths": { + "type": "object", + "description": "Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints.", + "additionalProperties": false, + "patternProperties": { + "^/": { + "$ref": "#/definitions/pathItem" + }, + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + } + }, + "pathItem": { + "type": "object", + "description": "Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. 
The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "summary": { + "type": "string" + }, + "description": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "trace": { + "$ref": "#/definitions/operation" + }, + "servers": { + "type": "array", + "items": { + "$ref": "#/definitions/server" + }, + "uniqueItems": true + }, + "parameters": { + "type": "array", + "items": { + "$ref": "#/definitions/parameterOrReference" + }, + "uniqueItems": true + } + } + }, + "operation": { + "type": "object", + "description": "Describes a single API operation on a path.", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string" + }, + "parameters": { + "type": "array", + "items": { + "$ref": "#/definitions/parameterOrReference" + }, + "uniqueItems": true + }, + "requestBody": { + "$ref": "#/definitions/requestBodyOrReference" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "callbacks": { + "$ref": "#/definitions/callbacksOrReferences" + }, + "deprecated": { + "type": "boolean" + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "servers": { + "type": "array", + "items": { + "$ref": "#/definitions/server" + }, + "uniqueItems": true + } + } + }, + "externalDocs": { + "type": "object", + "description": "Allows referencing an external resource for extended documentation.", + "required": [ + "url" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string" + } + } + }, + "parameter": { + "type": "object", + "description": "Describes a single operation parameter. 
A unique parameter is defined by a combination of a name and location.", + "required": [ + "name", + "in" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "name": { + "type": "string" + }, + "in": { + "type": "string" + }, + "description": { + "type": "string" + }, + "required": { + "type": "boolean" + }, + "deprecated": { + "type": "boolean" + }, + "allowEmptyValue": { + "type": "boolean" + }, + "style": { + "type": "string" + }, + "explode": { + "type": "boolean" + }, + "allowReserved": { + "type": "boolean" + }, + "schema": { + "$ref": "#/definitions/schemaOrReference" + }, + "example": { + "$ref": "#/definitions/any" + }, + "examples": { + "$ref": "#/definitions/examplesOrReferences" + }, + "content": { + "$ref": "#/definitions/mediaTypes" + } + } + }, + "requestBody": { + "type": "object", + "description": "Describes a single request body.", + "required": [ + "content" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "description": { + "type": "string" + }, + "content": { + "$ref": "#/definitions/mediaTypes" + }, + "required": { + "type": "boolean" + } + } + }, + "mediaType": { + "type": "object", + "description": "Each Media Type Object provides schema and examples for the media type identified by its key.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "schema": { + "$ref": "#/definitions/schemaOrReference" + }, + "example": { + "$ref": "#/definitions/any" + }, + "examples": { + "$ref": "#/definitions/examplesOrReferences" + }, + "encoding": { + "$ref": "#/definitions/encodings" + } + } + }, + "encoding": { + "type": "object", + "description": "A single encoding definition applied to a single schema property.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "contentType": { + "type": "string" + }, + "headers": { + "$ref": "#/definitions/headersOrReferences" + }, + "style": { + "type": "string" + }, + "explode": { + "type": "boolean" + }, + "allowReserved": { + "type": "boolean" + } + } + }, + "responses": { + "type": "object", + "description": "A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. 
The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call.", + "additionalProperties": false, + "patternProperties": { + "^([0-9X]{3})$": { + "$ref": "#/definitions/responseOrReference" + }, + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "default": { + "$ref": "#/definitions/responseOrReference" + } + } + }, + "response": { + "type": "object", + "description": "Describes a single response from an API Operation, including design-time, static `links` to operations based on the response.", + "required": [ + "description" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "description": { + "type": "string" + }, + "headers": { + "$ref": "#/definitions/headersOrReferences" + }, + "content": { + "$ref": "#/definitions/mediaTypes" + }, + "links": { + "$ref": "#/definitions/linksOrReferences" + } + } + }, + "callback": { + "type": "object", + "description": "A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation.", + "additionalProperties": false, + "patternProperties": { + "^": { + "$ref": "#/definitions/pathItem" + }, + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + } + }, + "example": { + "type": "object", + "description": "", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "summary": { + "type": "string" + }, + "description": { + "type": "string" + }, + "value": { + "$ref": "#/definitions/any" + }, + "externalValue": { + "type": "string" + } + } + }, + "link": { + "type": "object", + "description": "The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "operationRef": { + "type": "string" + }, + "operationId": { + "type": "string" + }, + "parameters": { + "$ref": "#/definitions/anysOrExpressions" + }, + "requestBody": { + "$ref": "#/definitions/anyOrExpression" + }, + "description": { + "type": "string" + }, + "server": { + "$ref": "#/definitions/server" + } + } + }, + "header": { + "type": "object", + "description": "The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. 
All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`).", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "description": { + "type": "string" + }, + "required": { + "type": "boolean" + }, + "deprecated": { + "type": "boolean" + }, + "allowEmptyValue": { + "type": "boolean" + }, + "style": { + "type": "string" + }, + "explode": { + "type": "boolean" + }, + "allowReserved": { + "type": "boolean" + }, + "schema": { + "$ref": "#/definitions/schemaOrReference" + }, + "example": { + "$ref": "#/definitions/any" + }, + "examples": { + "$ref": "#/definitions/examplesOrReferences" + }, + "content": { + "$ref": "#/definitions/mediaTypes" + } + } + }, + "tag": { + "type": "object", + "description": "Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances.", + "required": [ + "name" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + } + }, + "examples": { + "type": "object", + "description": "", + "additionalProperties": false + }, + "reference": { + "type": "object", + "description": "A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification.", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + } + } + }, + "schema": { + "type": "object", + "description": "The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. 
Unless stated otherwise, the property definitions follow the JSON Schema.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "nullable": { + "type": "boolean" + }, + "discriminator": { + "$ref": "#/definitions/discriminator" + }, + "readOnly": { + "type": "boolean" + }, + "writeOnly": { + "type": "boolean" + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": { + "$ref": "#/definitions/any" + }, + "deprecated": { + "type": "boolean" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maxLength" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minLength" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maxItems" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minItems" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maxProperties" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minProperties" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/required" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "type": { + "type": "string" + }, + "allOf": { + "type": "array", + "items": { + "$ref": "#/definitions/schemaOrReference" + }, + "minItems": 1 + }, + "oneOf": { + "type": "array", + "items": { + "$ref": "#/definitions/schemaOrReference" + }, + "minItems": 1 + }, + "anyOf": { + "type": "array", + "items": { + "$ref": "#/definitions/schemaOrReference" + }, + "minItems": 1 + }, + "not": { + "$ref": "#/definitions/schema" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schemaOrReference" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/schemaOrReference" + }, + "minItems": 1 + } + ] + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schemaOrReference" + } + }, + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/schemaOrReference" + }, + { + "type": "boolean" + } + ] + }, + "default": { + "$ref": "#/definitions/defaultType" + }, + "description": { + "type": "string" + }, + "format": { + "type": "string" + } + } + }, + "discriminator": { + "type": "object", + "description": "When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. 
The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered.", + "required": [ + "propertyName" + ], + "additionalProperties": false, + "properties": { + "propertyName": { + "type": "string" + }, + "mapping": { + "$ref": "#/definitions/strings" + } + } + }, + "xml": { + "type": "object", + "description": "A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + "prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean" + }, + "wrapped": { + "type": "boolean" + } + } + }, + "securityScheme": { + "type": "object", + "description": "Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect Discovery.", + "required": [ + "type" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "type": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "in": { + "type": "string" + }, + "scheme": { + "type": "string" + }, + "bearerFormat": { + "type": "string" + }, + "flows": { + "$ref": "#/definitions/oauthFlows" + }, + "openIdConnectUrl": { + "type": "string" + } + } + }, + "oauthFlows": { + "type": "object", + "description": "Allows configuration of the supported OAuth Flows.", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "implicit": { + "$ref": "#/definitions/oauthFlow" + }, + "password": { + "$ref": "#/definitions/oauthFlow" + }, + "clientCredentials": { + "$ref": "#/definitions/oauthFlow" + }, + "authorizationCode": { + "$ref": "#/definitions/oauthFlow" + } + } + }, + "oauthFlow": { + "type": "object", + "description": "Configuration details for a supported OAuth Flow", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/specificationExtension" + } + }, + "properties": { + "authorizationUrl": { + "type": "string" + }, + "tokenUrl": { + "type": "string" + }, + "refreshUrl": { + "type": "string" + }, + "scopes": { + "$ref": "#/definitions/strings" + } + } + }, + "securityRequirement": { + "type": "object", + "description": "Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. 
When a list of Security Requirement Objects is defined on the Open API object or Operation Object, only one of Security Requirement Objects in the list needs to be satisfied to authorize the request.", + "additionalProperties": false, + "patternProperties": { + "^[a-zA-Z0-9\\.\\-_]+$": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + } + }, + "anyOrExpression": { + "oneOf": [ + { + "$ref": "#/definitions/any" + }, + { + "$ref": "#/definitions/expression" + } + ] + }, + "callbackOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/callback" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "exampleOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/example" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "headerOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/header" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "linkOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/link" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "parameterOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "requestBodyOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/requestBody" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "responseOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "schemaOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "securitySchemeOrReference": { + "oneOf": [ + { + "$ref": "#/definitions/securityScheme" + }, + { + "$ref": "#/definitions/reference" + } + ] + }, + "anysOrExpressions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/anyOrExpression" + } + }, + "callbacksOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/callbackOrReference" + } + }, + "encodings": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/encoding" + } + }, + "examplesOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/exampleOrReference" + } + }, + "headersOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/headerOrReference" + } + }, + "linksOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/linkOrReference" + } + }, + "mediaTypes": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/mediaType" + } + }, + "parametersOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameterOrReference" + } + }, + "requestBodiesOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/requestBodyOrReference" + } + }, + "responsesOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/responseOrReference" + } + }, + "schemasOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schemaOrReference" + } + }, + "securitySchemesOrReferences": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/securitySchemeOrReference" + } + }, + "serverVariables": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/serverVariable" + } + }, + "strings": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "object": { + "type": "object", + "additionalProperties": 
true + }, + "any": { + "additionalProperties": true + }, + "expression": { + "type": "object", + "additionalProperties": true + }, + "specificationExtension": { + "description": "Any property starting with x- is valid.", + "oneOf": [ + { + "type": "null" + }, + { + "type": "number" + }, + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "type": "object" + }, + { + "type": "array" + } + ] + }, + "defaultType": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "array" + }, + { + "type": "object" + }, + { + "type": "number" + }, + { + "type": "boolean" + }, + { + "type": "string" + } + ] + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/3.0.md b/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/3.0.md new file mode 100644 index 000000000..30a041ad8 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/3.0.md @@ -0,0 +1,3441 @@ +# OpenAPI Specification + +#### Version 3.0.0 + +The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "NOT RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be interpreted as described in [BCP 14](https://tools.ietf.org/html/bcp14) [RFC2119](https://tools.ietf.org/html/rfc2119) [RFC8174](https://tools.ietf.org/html/rfc8174) when, and only when, they appear in all capitals, as shown here. + +This document is licensed under [The Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.html). + +## Introduction + +The OpenAPI Specification (OAS) defines a standard, language-agnostic interface to RESTful APIs which allows both humans and computers to discover and understand the capabilities of the service without access to source code, documentation, or through network traffic inspection. When properly defined, a consumer can understand and interact with the remote service with a minimal amount of implementation logic. + +An OpenAPI definition can then be used by documentation generation tools to display the API, code generation tools to generate servers and clients in various programming languages, testing tools, and many other use cases. 
+ +## Table of Contents + + +- [Definitions](#definitions) + - [OpenAPI Document](#oasDocument) + - [Path Templating](#pathTemplating) + - [Media Types](#mediaTypes) + - [HTTP Status Codes](#httpCodes) +- [Specification](#specification) + - [Versions](#versions) + - [Format](#format) + - [Document Structure](#documentStructure) + - [Data Types](#dataTypes) + - [Rich Text Formatting](#richText) + - [Relative References In URLs](#relativeReferences) + - [Schema](#schema) + - [OpenAPI Object](#oasObject) + - [Info Object](#infoObject) + - [Contact Object](#contactObject) + - [License Object](#licenseObject) + - [Server Object](#serverObject) + - [Server Variable Object](#serverVariableObject) + - [Components Object](#componentsObject) + - [Paths Object](#pathsObject) + - [Path Item Object](#pathItemObject) + - [Operation Object](#operationObject) + - [External Documentation Object](#externalDocumentationObject) + - [Parameter Object](#parameterObject) + - [Request Body Object](#requestBodyObject) + - [Media Type Object](#mediaTypeObject) + - [Encoding Object](#encodingObject) + - [Responses Object](#responsesObject) + - [Response Object](#responseObject) + - [Callback Object](#callbackObject) + - [Example Object](#exampleObject) + - [Link Object](#linkObject) + - [Header Object](#headerObject) + - [Tag Object](#tagObject) + - [Reference Object](#referenceObject) + - [Schema Object](#schemaObject) + - [Discriminator Object](#discriminatorObject) + - [XML Object](#xmlObject) + - [Security Scheme Object](#securitySchemeObject) + - [OAuth Flows Object](#oauthFlowsObject) + - [OAuth Flow Object](#oauthFlowObject) + - [Security Requirement Object](#securityRequirementObject) + - [Specification Extensions](#specificationExtensions) + - [Security Filtering](#securityFiltering) +- [Appendix A: Revision History](#revisionHistory) + + + + +## Definitions + +##### OpenAPI Document +A document (or set of documents) that defines or describes an API. An OpenAPI definition uses and conforms to the OpenAPI Specification. + +##### Path Templating +Path templating refers to the usage of curly braces ({}) to mark a section of a URL path as replaceable using path parameters. + +##### Media Types +Media type definitions are spread across several resources. +The media type definitions SHOULD be in compliance with [RFC6838](http://tools.ietf.org/html/rfc6838). + +Some examples of possible media type definitions: +``` + text/plain; charset=utf-8 + application/json + application/vnd.github+json + application/vnd.github.v3+json + application/vnd.github.v3.raw+json + application/vnd.github.v3.text+json + application/vnd.github.v3.html+json + application/vnd.github.v3.full+json + application/vnd.github.v3.diff + application/vnd.github.v3.patch +``` +##### HTTP Status Codes +The HTTP Status Codes are used to indicate the status of the executed operation. +The available status codes are defined by [RFC7231](http://tools.ietf.org/html/rfc7231#section-6) and registered status codes are listed in the [IANA Status Code Registry](http://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml). + +## Specification + +### Versions + +The OpenAPI Specification is versioned using [Semantic Versioning 2.0.0](http://semver.org/spec/v2.0.0.html) (semver) and follows the semver specification. + +The `major`.`minor` portion of the semver (for example `3.0`) SHALL designate the OAS feature set. Typically, *`.patch`* versions address errors in this document, not the feature set. 
Tooling which supports OAS 3.0 SHOULD be compatible with all OAS 3.0.\* versions. The patch version SHOULD NOT be considered by tooling, making no distinction between `3.0.0` and `3.0.1` for example. + +Subsequent minor version releases of the OpenAPI Specification (incrementing the `minor` version number) SHOULD NOT interfere with tooling developed to a lower minor version and same major version. Thus a hypothetical `3.1.0` specification SHOULD be usable with tooling designed for `3.0.0`. + +An OpenAPI document compatible with OAS 3.\*.\* contains a required [`openapi`](#oasVersion) field which designates the semantic version of the OAS that it uses. (OAS 2.0 documents contain a top-level version field named [`swagger`](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#swaggerObject) and value `"2.0"`.) + +### Format + +An OpenAPI document that conforms to the OpenAPI Specification is itself a JSON object, which may be represented either in JSON or YAML format. + +For example, if a field has an array value, the JSON array representation will be used: + +```json +{ + "field": [ 1, 2, 3 ] +} +``` +All field names in the specification are **case sensitive**. + +The schema exposes two types of fields: Fixed fields, which have a declared name, and Patterned fields, which declare a regex pattern for the field name. + +Patterned fields MUST have unique names within the containing object. + +In order to preserve the ability to round-trip between YAML and JSON formats, YAML version [1.2](http://www.yaml.org/spec/1.2/spec.html) is RECOMMENDED along with some additional constraints: + +- Tags MUST be limited to those allowed by the [JSON Schema ruleset](http://www.yaml.org/spec/1.2/spec.html#id2803231). +- Keys used in YAML maps MUST be limited to a scalar string, as defined by the [YAML Failsafe schema ruleset](http://yaml.org/spec/1.2/spec.html#id2802346). + +**Note:** While APIs may be defined by OpenAPI documents in either YAML or JSON format, the API request and response bodies and other content are not required to be JSON or YAML. + +### Document Structure + +An OpenAPI document MAY be made up of a single document or be divided into multiple, connected parts at the discretion of the user. In the latter case, `$ref` fields MUST be used in the specification to reference those parts as follows from the [JSON Schema](http://json-schema.org) definitions. + +It is RECOMMENDED that the root OpenAPI document be named: `openapi.json` or `openapi.yaml`. + +### Data Types + +Primitive data types in the OAS are based on the types supported by the [JSON Schema Specification Wright Draft 00](https://tools.ietf.org/html/draft-wright-json-schema-00#section-4.2). +Note that `integer` as a type is also supported and is defined as a JSON number without a fraction or exponent part. +`null` is not supported as a type (see [`nullable`](#schemaNullable) for an alternative solution). +Models are defined using the [Schema Object](#schemaObject), which is an extended subset of JSON Schema Specification Wright Draft 00. + +Primitives have an optional modifier property: `format`. +OAS uses several known formats to define in fine detail the data type being used. +However, to support documentation needs, the `format` property is an open `string`-valued property, and can have any value. +Formats such as `"email"`, `"uuid"`, and so on, MAY be used even though undefined by this specification. +Types that are not accompanied by a `format` property follow the type definition in the JSON Schema. 
Tools that do not recognize a specific `format` MAY default back to the `type` alone, as if the `format` is not specified. + +The formats defined by the OAS are: + +Common Name | [`type`](#dataTypes) | [`format`](#dataTypeFormat) | Comments +----------- | ------ | -------- | -------- +integer | `integer` | `int32` | signed 32 bits +long | `integer` | `int64` | signed 64 bits +float | `number` | `float` | | +double | `number` | `double` | | +string | `string` | | | +byte | `string` | `byte` | base64 encoded characters +binary | `string` | `binary` | any sequence of octets +boolean | `boolean` | | | +date | `string` | `date` | As defined by `full-date` - [RFC3339](http://xml2rfc.ietf.org/public/rfc/html/rfc3339.html#anchor14) +dateTime | `string` | `date-time` | As defined by `date-time` - [RFC3339](http://xml2rfc.ietf.org/public/rfc/html/rfc3339.html#anchor14) +password | `string` | `password` | A hint to UIs to obscure input. + +### Rich Text Formatting +Throughout the specification `description` fields are noted as supporting CommonMark markdown formatting. +Where OpenAPI tooling renders rich text it MUST support, at a minimum, markdown syntax as described by [CommonMark 0.27](http://spec.commonmark.org/0.27/). Tooling MAY choose to ignore some CommonMark features to address security concerns. + +### Relative References in URLs + +Unless specified otherwise, all properties that are URLs MAY be relative references as defined by [RFC3986](https://tools.ietf.org/html/rfc3986#section-4.2). +Relative references are resolved using the URLs defined in the [`Server Object`](#serverObject) as a Base URI. + +Relative references used in `$ref` are processed as per [JSON Reference](https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03), using the URL of the current document as the base URI. See also the [Reference Object](#referenceObject). + +### Schema + +In the following description, if a field is not explicitly **REQUIRED** or described with a MUST or SHALL, it can be considered OPTIONAL. + +#### OpenAPI Object + +This is the root document object of the [OpenAPI document](#oasDocument). + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +openapi | `string` | **REQUIRED**. This string MUST be the [semantic version number](http://semver.org/spec/v2.0.0.html) of the [OpenAPI Specification version](#versions) that the OpenAPI document uses. The `openapi` field SHOULD be used by tooling specifications and clients to interpret the OpenAPI document. This is *not* related to the API [`info.version`](#infoVersion) string. +info | [Info Object](#infoObject) | **REQUIRED**. Provides metadata about the API. The metadata MAY be used by tooling as required. +servers | [[Server Object](#serverObject)] | An array of Server Objects, which provide connectivity information to a target server. If the `servers` property is not provided, or is an empty array, the default value would be a [Server Object](#serverObject) with a [url](#serverUrl) value of `/`. +paths | [Paths Object](#pathsObject) | **REQUIRED**. The available paths and operations for the API. +components | [Components Object](#componentsObject) | An element to hold various schemas for the specification. +security | [[Security Requirement Object](#securityRequirementObject)] | A declaration of which security mechanisms can be used across the API. The list of values includes alternative security requirement objects that can be used. Only one of the security requirement objects need to be satisfied to authorize a request. 
Individual operations can override this definition. +tags | [[Tag Object](#tagObject)] | A list of tags used by the specification with additional metadata. The order of the tags can be used to reflect on their order by the parsing tools. Not all tags that are used by the [Operation Object](#operationObject) must be declared. The tags that are not declared MAY be organized randomly or based on the tools' logic. Each tag name in the list MUST be unique. +externalDocs | [External Documentation Object](#externalDocumentationObject) | Additional external documentation. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +#### Info Object + +The object provides metadata about the API. +The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +title | `string` | **REQUIRED**. The title of the application. +description | `string` | A short description of the application. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +termsOfService | `string` | A URL to the Terms of Service for the API. MUST be in the format of a URL. +contact | [Contact Object](#contactObject) | The contact information for the exposed API. +license | [License Object](#licenseObject) | The license information for the exposed API. +version | `string` | **REQUIRED**. The version of the OpenAPI document (which is distinct from the [OpenAPI Specification version](#oasVersion) or the API implementation version). + + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Info Object Example: + +```json +{ + "title": "Sample Pet Store App", + "description": "This is a sample server for a pet store.", + "termsOfService": "http://example.com/terms/", + "contact": { + "name": "API Support", + "url": "http://www.example.com/support", + "email": "support@example.com" + }, + "license": { + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "1.0.1" +} +``` + +```yaml +title: Sample Pet Store App +description: This is a sample server for a pet store. +termsOfService: http://example.com/terms/ +contact: + name: API Support + url: http://www.example.com/support + email: support@example.com +license: + name: Apache 2.0 + url: http://www.apache.org/licenses/LICENSE-2.0.html +version: 1.0.1 +``` + +#### Contact Object + +Contact information for the exposed API. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +name | `string` | The identifying name of the contact person/organization. +url | `string` | The URL pointing to the contact information. MUST be in the format of a URL. +email | `string` | The email address of the contact person/organization. MUST be in the format of an email address. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Contact Object Example: + +```json +{ + "name": "API Support", + "url": "http://www.example.com/support", + "email": "support@example.com" +} +``` + +```yaml +name: API Support +url: http://www.example.com/support +email: support@example.com +``` + +#### License Object + +License information for the exposed API. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +name | `string` | **REQUIRED**. The license name used for the API. +url | `string` | A URL to the license used for the API. 
MUST be in the format of a URL. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### License Object Example: + +```json +{ + "name": "Apache 2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0.html" +} +``` + +```yaml +name: Apache 2.0 +url: http://www.apache.org/licenses/LICENSE-2.0.html +``` + +#### Server Object + +An object representing a Server. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +url | `string` | **REQUIRED**. A URL to the target host. This URL supports Server Variables and MAY be relative, to indicate that the host location is relative to the location where the OpenAPI document is being served. Variable substitutions will be made when a variable is named in `{`brackets`}`. +description | `string` | An optional string describing the host designated by the URL. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +variables | Map[`string`, [Server Variable Object](#serverVariableObject)] | A map between a variable name and its value. The value is used for substitution in the server's URL template. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Server Object Example + +A single server would be described as: + +```json +{ + "url": "https://development.gigantic-server.com/v1", + "description": "Development server" +} +``` + +```yaml +url: https://development.gigantic-server.com/v1 +description: Development server +``` + +The following shows how multiple servers can be described, for example, at the OpenAPI Object's [`servers`](#oasServers): + +```json +{ + "servers": [ + { + "url": "https://development.gigantic-server.com/v1", + "description": "Development server" + }, + { + "url": "https://staging.gigantic-server.com/v1", + "description": "Staging server" + }, + { + "url": "https://api.gigantic-server.com/v1", + "description": "Production server" + } + ] +} +``` + +```yaml +servers: +- url: https://development.gigantic-server.com/v1 + description: Development server +- url: https://staging.gigantic-server.com/v1 + description: Staging server +- url: https://api.gigantic-server.com/v1 + description: Production server +``` + +The following shows how variables can be used for a server configuration: + +```json +{ + "servers": [ + { + "url": "https://{username}.gigantic-server.com:{port}/{basePath}", + "description": "The production API server", + "variables": { + "username": { + "default": "demo", + "description": "this value is assigned by the service provider, in this example `gigantic-server.com`" + }, + "port": { + "enum": [ + "8443", + "443" + ], + "default": "8443" + }, + "basePath": { + "default": "v2" + } + } + } + ] +} +``` + +```yaml +servers: +- url: https://{username}.gigantic-server.com:{port}/{basePath} + description: The production API server + variables: + username: + # note! no enum here means it is an open value + default: demo + description: this value is assigned by the service provider, in this example `gigantic-server.com` + port: + enum: + - '8443' + - '443' + default: '8443' + basePath: + # open meaning there is the opportunity to use special base paths as assigned by the provider, default is `v2` + default: v2 +``` + + +#### Server Variable Object + +An object representing a Server Variable for server URL template substitution. 
+ +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +enum | [`string`] | An enumeration of string values to be used if the substitution options are from a limited set. +default | `string` | **REQUIRED**. The default value to use for substitution, and to send, if an alternate value is _not_ supplied. Unlike the [Schema Object's](#schemaObject) `default`, this value MUST be provided by the consumer. +description | `string` | An optional description for the server variable. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +#### Components Object + +Holds a set of reusable objects for different aspects of the OAS. +All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object. + + +##### Fixed Fields + +Field Name | Type | Description +---|:---|--- + schemas | Map[`string`, [Schema Object](#schemaObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Schema Objects](#schemaObject). + responses | Map[`string`, [Response Object](#responseObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Response Objects](#responseObject). + parameters | Map[`string`, [Parameter Object](#parameterObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Parameter Objects](#parameterObject). + examples | Map[`string`, [Example Object](#exampleObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Example Objects](#exampleObject). + requestBodies | Map[`string`, [Request Body Object](#requestBodyObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Request Body Objects](#requestBodyObject). + headers | Map[`string`, [Header Object](#headerObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Header Objects](#headerObject). + securitySchemes| Map[`string`, [Security Scheme Object](#securitySchemeObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Security Scheme Objects](#securitySchemeObject). + links | Map[`string`, [Link Object](#linkObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Link Objects](#linkObject). + callbacks | Map[`string`, [Callback Object](#callbackObject) \| [Reference Object](#referenceObject)] | An object to hold reusable [Callback Objects](#callbackObject). + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +All the fixed fields declared above are objects that MUST use keys that match the regular expression: `^[a-zA-Z0-9\.\-_]+$`. 
+ +Field Name Examples: + +``` +User +User_1 +User_Name +user-name +my.org.User +``` + +##### Components Object Example + +```json +"components": { + "schemas": { + "Category": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + } + }, + "Tag": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + } + } + }, + "parameters": { + "skipParam": { + "name": "skip", + "in": "query", + "description": "number of items to skip", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } + }, + "limitParam": { + "name": "limit", + "in": "query", + "description": "max records to return", + "required": true, + "schema" : { + "type": "integer", + "format": "int32" + } + } + }, + "responses": { + "NotFound": { + "description": "Entity not found." + }, + "IllegalInput": { + "description": "Illegal input for operation." + }, + "GeneralError": { + "description": "General Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GeneralError" + } + } + } + } + }, + "securitySchemes": { + "api_key": { + "type": "apiKey", + "name": "api_key", + "in": "header" + }, + "petstore_auth": { + "type": "oauth2", + "flows": { + "implicit": { + "authorizationUrl": "http://example.org/api/oauth/dialog", + "scopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + } + } + } + } +} +``` + +```yaml +components: + schemas: + Category: + type: object + properties: + id: + type: integer + format: int64 + name: + type: string + Tag: + type: object + properties: + id: + type: integer + format: int64 + name: + type: string + parameters: + skipParam: + name: skip + in: query + description: number of items to skip + required: true + schema: + type: integer + format: int32 + limitParam: + name: limit + in: query + description: max records to return + required: true + schema: + type: integer + format: int32 + responses: + NotFound: + description: Entity not found. + IllegalInput: + description: Illegal input for operation. + GeneralError: + description: General Error + content: + application/json: + schema: + $ref: '#/components/schemas/GeneralError' + securitySchemes: + api_key: + type: apiKey + name: api_key + in: header + petstore_auth: + type: oauth2 + flows: + implicit: + authorizationUrl: http://example.org/api/oauth/dialog + scopes: + write:pets: modify pets in your account + read:pets: read your pets +``` + + +#### Paths Object + +Holds the relative paths to the individual endpoints and their operations. +The path is appended to the URL from the [`Server Object`](#serverObject) in order to construct the full URL. The Paths MAY be empty, due to [ACL constraints](#securityFiltering). + +##### Patterned Fields + +Field Pattern | Type | Description +---|:---:|--- +/{path} | [Path Item Object](#pathItemObject) | A relative path to an individual endpoint. The field name MUST begin with a slash. The path is **appended** (no relative URL resolution) to the expanded URL from the [`Server Object`](#serverObject)'s `url` field in order to construct the full URL. [Path templating](#pathTemplating) is allowed. When matching URLs, concrete (non-templated) paths would be matched before their templated counterparts. Templated paths with the same hierarchy but different templated names MUST NOT exist as they are identical. 
In case of ambiguous matching, it's up to the tooling to decide which one to use. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Path Templating Matching + +Assuming the following paths, the concrete definition, `/pets/mine`, will be matched first if used: + +``` + /pets/{petId} + /pets/mine +``` + +The following paths are considered identical and invalid: + +``` + /pets/{petId} + /pets/{name} +``` + +The following may lead to ambiguous resolution: + +``` + /{entity}/me + /books/{id} +``` + +##### Paths Object Example + +```json +{ + "/pets": { + "get": { + "description": "Returns all pets from the system that the user has access to", + "responses": { + "200": { + "description": "A list of pets.", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/pet" + } + } + } + } + } + } + } + } +} +``` + +```yaml +/pets: + get: + description: Returns all pets from the system that the user has access to + responses: + '200': + description: A list of pets. + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/pet' +``` + +#### Path Item Object + +Describes the operations available on a single path. +A Path Item MAY be empty, due to [ACL constraints](#securityFiltering). +The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +$ref | `string` | Allows for an external definition of this path item. The referenced structure MUST be in the format of a [Path Item Object](#pathItemObject). If there are conflicts between the referenced definition and this Path Item's definition, the behavior is *undefined*. +summary| `string` | An optional, string summary, intended to apply to all operations in this path. +description | `string` | An optional, string description, intended to apply to all operations in this path. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +get | [Operation Object](#operationObject) | A definition of a GET operation on this path. +put | [Operation Object](#operationObject) | A definition of a PUT operation on this path. +post | [Operation Object](#operationObject) | A definition of a POST operation on this path. +delete | [Operation Object](#operationObject) | A definition of a DELETE operation on this path. +options | [Operation Object](#operationObject) | A definition of a OPTIONS operation on this path. +head | [Operation Object](#operationObject) | A definition of a HEAD operation on this path. +patch | [Operation Object](#operationObject) | A definition of a PATCH operation on this path. +trace | [Operation Object](#operationObject) | A definition of a TRACE operation on this path. +servers | [[Server Object](#serverObject)] | An alternative `server` array to service all operations in this path. +parameters | [[Parameter Object](#parameterObject) \| [Reference Object](#referenceObject)] | A list of parameters that are applicable for all the operations described under this path. These parameters can be overridden at the operation level, but cannot be removed there. The list MUST NOT include duplicated parameters. A unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn). 
The list can use the [Reference Object](#referenceObject) to link to parameters that are defined at the [OpenAPI Object's components/parameters](#componentsParameters). + + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Path Item Object Example + +```json +{ + "get": { + "description": "Returns pets based on ID", + "summary": "Find pets by ID", + "operationId": "getPetsById", + "responses": { + "200": { + "description": "pet response", + "content": { + "*/*": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Pet" + } + } + } + } + }, + "default": { + "description": "error payload", + "content": { + "text/html": { + "schema": { + "$ref": "#/components/schemas/ErrorModel" + } + } + } + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to use", + "required": true, + "schema": { + "type": "array", + "items": { + "type": "string" + } + }, + "style": "simple" + } + ] +} +``` + +```yaml +get: + description: Returns pets based on ID + summary: Find pets by ID + operationId: getPetsById + responses: + '200': + description: pet response + content: + '*/*' : + schema: + type: array + items: + $ref: '#/components/schemas/Pet' + default: + description: error payload + content: + 'text/html': + schema: + $ref: '#/components/schemas/ErrorModel' +parameters: +- name: id + in: path + description: ID of pet to use + required: true + schema: + type: array + style: simple + items: + type: string +``` + +#### Operation Object + +Describes a single API operation on a path. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +tags | [`string`] | A list of tags for API documentation control. Tags can be used for logical grouping of operations by resources or any other qualifier. +summary | `string` | A short summary of what the operation does. +description | `string` | A verbose explanation of the operation behavior. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +externalDocs | [External Documentation Object](#externalDocumentationObject) | Additional external documentation for this operation. +operationId | `string` | Unique string used to identify the operation. The id MUST be unique among all operations described in the API. Tools and libraries MAY use the operationId to uniquely identify an operation, therefore, it is RECOMMENDED to follow common programming naming conventions. +parameters | [[Parameter Object](#parameterObject) \| [Reference Object](#referenceObject)] | A list of parameters that are applicable for this operation. If a parameter is already defined at the [Path Item](#pathItemParameters), the new definition will override it but can never remove it. The list MUST NOT include duplicated parameters. A unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn). The list can use the [Reference Object](#referenceObject) to link to parameters that are defined at the [OpenAPI Object's components/parameters](#componentsParameters). +requestBody | [Request Body Object](#requestBodyObject) \| [Reference Object](#referenceObject) | The request body applicable for this operation. The `requestBody` is only supported in HTTP methods where the HTTP 1.1 specification [RFC7231](https://tools.ietf.org/html/rfc7231#section-4.3.1) has explicitly defined semantics for request bodies. In other cases where the HTTP spec is vague, `requestBody` SHALL be ignored by consumers. 
+responses | [Responses Object](#responsesObject) | **REQUIRED**. The list of possible responses as they are returned from executing this operation. +callbacks | Map[`string`, [Callback Object](#callbackObject) \| [Reference Object](#referenceObject)] | A map of possible out-of band callbacks related to the parent operation. The key is a unique identifier for the Callback Object. Each value in the map is a [Callback Object](#callbackObject) that describes a request that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation. +deprecated | `boolean` | Declares this operation to be deprecated. Consumers SHOULD refrain from usage of the declared operation. Default value is `false`. +security | [[Security Requirement Object](#securityRequirementObject)] | A declaration of which security mechanisms can be used for this operation. The list of values includes alternative security requirement objects that can be used. Only one of the security requirement objects need to be satisfied to authorize a request. This definition overrides any declared top-level [`security`](#oasSecurity). To remove a top-level security declaration, an empty array can be used. +servers | [[Server Object](#serverObject)] | An alternative `server` array to service this operation. If an alternative `server` object is specified at the Path Item Object or Root level, it will be overridden by this value. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Operation Object Example + +```json +{ + "tags": [ + "pet" + ], + "summary": "Updates a pet in the store with form data", + "operationId": "updatePetWithForm", + "parameters": [ + { + "name": "petId", + "in": "path", + "description": "ID of pet that needs to be updated", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/x-www-form-urlencoded": { + "schema": { + "type": "object", + "properties": { + "name": { + "description": "Updated name of the pet", + "type": "string" + }, + "status": { + "description": "Updated status of the pet", + "type": "string" + } + }, + "required": ["status"] + } + } + } + }, + "responses": { + "200": { + "description": "Pet updated.", + "content": { + "application/json": {}, + "application/xml": {} + } + }, + "405": { + "description": "Invalid input", + "content": { + "application/json": {}, + "application/xml": {} + } + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] +} +``` + +```yaml +tags: +- pet +summary: Updates a pet in the store with form data +operationId: updatePetWithForm +parameters: +- name: petId + in: path + description: ID of pet that needs to be updated + required: true + schema: + type: string +requestBody: + content: + 'application/x-www-form-urlencoded': + schema: + properties: + name: + description: Updated name of the pet + type: string + status: + description: Updated status of the pet + type: string + required: + - status +responses: + '200': + description: Pet updated. + content: + 'application/json': {} + 'application/xml': {} + '405': + description: Invalid input + content: + 'application/json': {} + 'application/xml': {} +security: +- petstore_auth: + - write:pets + - read:pets +``` + + +#### External Documentation Object + +Allows referencing an external resource for extended documentation. 
+ +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +description | `string` | A short description of the target documentation. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +url | `string` | **REQUIRED**. The URL for the target documentation. Value MUST be in the format of a URL. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### External Documentation Object Example + +```json +{ + "description": "Find more info here", + "url": "https://example.com" +} +``` + +```yaml +description: Find more info here +url: https://example.com +``` + +#### Parameter Object + +Describes a single operation parameter. + +A unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn). + +##### Parameter Locations +There are four possible parameter locations specified by the `in` field: +* path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`, the path parameter is `itemId`. +* query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`. +* header - Custom headers that are expected as part of the request. Note that [RFC7230](https://tools.ietf.org/html/rfc7230#page-22) states header names are case insensitive. +* cookie - Used to pass a specific cookie value to the API. + + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +name | `string` | **REQUIRED**. The name of the parameter. Parameter names are *case sensitive*.
  • If [`in`](#parameterIn) is `"path"`, the `name` field MUST correspond to the associated path segment from the [path](#pathsPath) field in the [Paths Object](#pathsObject). See [Path Templating](#pathTemplating) for further information.
  • If [`in`](#parameterIn) is `"header"` and the `name` field is `"Accept"`, `"Content-Type"` or `"Authorization"`, the parameter definition SHALL be ignored.
  • For all other cases, the `name` corresponds to the parameter name used by the [`in`](#parameterIn) property.
+in | `string` | **REQUIRED**. The location of the parameter. Possible values are "query", "header", "path" or "cookie". +description | `string` | A brief description of the parameter. This could contain examples of use. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +required | `boolean` | Determines whether this parameter is mandatory. If the [parameter location](#parameterIn) is "path", this property is **REQUIRED** and its value MUST be `true`. Otherwise, the property MAY be included and its default value is `false`. + deprecated | `boolean` | Specifies that a parameter is deprecated and SHOULD be transitioned out of usage. + allowEmptyValue | `boolean` | Sets the ability to pass empty-valued parameters. This is valid only for `query` parameters and allows sending a parameter with an empty value. Default value is `false`. If [`style`](#parameterStyle) is used, and if behavior is `n/a` (cannot be serialized), the value of `allowEmptyValue` SHALL be ignored. + +The rules for serialization of the parameter are specified in one of two ways. +For simpler scenarios, a [`schema`](#parameterSchema) and [`style`](#parameterStyle) can describe the structure and syntax of the parameter. + +Field Name | Type | Description +---|:---:|--- +style | `string` | Describes how the parameter value will be serialized depending on the type of the parameter value. Default values (based on value of `in`): for `query` - `form`; for `path` - `simple`; for `header` - `simple`; for `cookie` - `form`. +explode | `boolean` | When this is true, parameter values of type `array` or `object` generate separate parameters for each value of the array or key-value pair of the map. For other types of parameters this property has no effect. When [`style`](#parameterStyle) is `form`, the default value is `true`. For all other styles, the default value is `false`. +allowReserved | `boolean` | Determines whether the parameter value SHOULD allow reserved characters, as defined by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.2) `:/?#[]@!$&'()*+,;=` to be included without percent-encoding. This property only applies to parameters with an `in` value of `query`. The default value is `false`. +schema | [Schema Object](#schemaObject) \| [Reference Object](#referenceObject) | The schema defining the type used for the parameter. +example | Any | Example of the media type. The example SHOULD match the specified schema and encoding properties if present. The `example` object is mutually exclusive of the `examples` object. Furthermore, if referencing a `schema` which contains an example, the `example` value SHALL _override_ the example provided by the schema. To represent examples of media types that cannot naturally be represented in JSON or YAML, a string value can contain the example with escaping where necessary. +examples | Map[ `string`, [Example Object](#exampleObject) \| [Reference Object](#referenceObject)] | Examples of the media type. Each example SHOULD contain a value in the correct format as specified in the parameter encoding. The `examples` object is mutually exclusive of the `example` object. Furthermore, if referencing a `schema` which contains an example, the `examples` value SHALL _override_ the example provided by the schema. + +For more complex scenarios, the [`content`](#parameterContent) property can define the media type and schema of the parameter. +A parameter MUST contain either a `schema` property, or a `content` property, but not both. 
+When `example` or `examples` are provided in conjunction with the `schema` object, the example MUST follow the prescribed serialization strategy for the parameter.
+
+
+Field Name | Type | Description
+---|:---:|---
+content | Map[`string`, [Media Type Object](#mediaTypeObject)] | A map containing the representations for the parameter. The key is the media type and the value describes it. The map MUST only contain one entry.
+
+##### Style Values
+
+In order to support common ways of serializing simple parameters, a set of `style` values is defined.
+
+`style` | [`type`](#dataTypes) | `in` | Comments
+----------- | ------ | -------- | --------
+matrix | `primitive`, `array`, `object` | `path` | Path-style parameters defined by [RFC6570](https://tools.ietf.org/html/rfc6570#section-3.2.7)
+label | `primitive`, `array`, `object` | `path` | Label style parameters defined by [RFC6570](https://tools.ietf.org/html/rfc6570#section-3.2.5)
+form | `primitive`, `array`, `object` | `query`, `cookie` | Form style parameters defined by [RFC6570](https://tools.ietf.org/html/rfc6570#section-3.2.8). This option replaces `collectionFormat` with a `csv` (when `explode` is false) or `multi` (when `explode` is true) value from OpenAPI 2.0.
+simple | `array` | `path`, `header` | Simple style parameters defined by [RFC6570](https://tools.ietf.org/html/rfc6570#section-3.2.2). This option replaces `collectionFormat` with a `csv` value from OpenAPI 2.0.
+spaceDelimited | `array` | `query` | Space separated array values. This option replaces `collectionFormat` equal to `ssv` from OpenAPI 2.0.
+pipeDelimited | `array` | `query` | Pipe separated array values. This option replaces `collectionFormat` equal to `pipes` from OpenAPI 2.0.
+deepObject | `object` | `query` | Provides a simple way of rendering nested objects using form parameters.
+
+
+##### Style Examples
+
+Assume a parameter named `color` has one of the following values:
+
+```
+ string -> "blue"
+ array -> ["blue","black","brown"]
+ object -> { "R": 100, "G": 200, "B": 150 }
+```
+The following table shows examples of rendering differences for each value.
+
+[`style`](#dataTypeFormat) | `explode` | `empty` | `string` | `array` | `object`
+----------- | ------ | -------- | -------- | --------|-------
+matrix | false | ;color | ;color=blue | ;color=blue,black,brown | ;color=R,100,G,200,B,150
+matrix | true | ;color | ;color=blue | ;color=blue;color=black;color=brown | ;R=100;G=200;B=150
+label | false | . | .blue | .blue.black.brown | .R.100.G.200.B.150
+label | true | . | .blue | .blue.black.brown | .R=100.G=200.B=150
+form | false | color= | color=blue | color=blue,black,brown | color=R,100,G,200,B,150
+form | true | color= | color=blue | color=blue&color=black&color=brown | R=100&G=200&B=150
+simple | false | n/a | blue | blue,black,brown | R,100,G,200,B,150
+simple | true | n/a | blue | blue,black,brown | R=100,G=200,B=150
+spaceDelimited | false | n/a | n/a | blue%20black%20brown | R%20100%20G%20200%20B%20150
+pipeDelimited | false | n/a | n/a | blue\|black\|brown | R\|100\|G\|200\|B\|150
+deepObject | true | n/a | n/a | n/a | color[R]=100&color[G]=200&color[B]=150
+
+This object MAY be extended with [Specification Extensions](#specificationExtensions).
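+
+As a non-normative illustration of the table above, consider a `deepObject` query parameter; the parameter name `filter` and its schema are hypothetical and chosen only to show the resulting serialization:
+
+```yaml
+# Illustrative sketch only; `filter` is a hypothetical parameter name.
+name: filter
+in: query
+required: false
+style: deepObject
+explode: true
+schema:
+  type: object
+  additionalProperties:
+    type: string
+```
+
+For a value of `{"color": "blue", "size": "large"}`, this would serialize as `filter[color]=blue&filter[size]=large`, following the `deepObject` row above.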
+ +##### Parameter Object Examples + +A header parameter with an array of 64 bit integer numbers: + +```json +{ + "name": "token", + "in": "header", + "description": "token to be passed as a header", + "required": true, + "schema": { + "type": "array", + "items": { + "type": "integer", + "format": "int64" + } + }, + "style": "simple" +} +``` + +```yaml +name: token +in: header +description: token to be passed as a header +required: true +schema: + type: array + items: + type: integer + format: int64 +style: simple +``` + +A path parameter of a string value: +```json +{ + "name": "username", + "in": "path", + "description": "username to fetch", + "required": true, + "schema": { + "type": "string" + } +} +``` + +```yaml +name: username +in: path +description: username to fetch +required: true +schema: + type: string +``` + +An optional query parameter of a string value, allowing multiple values by repeating the query parameter: +```json +{ + "name": "id", + "in": "query", + "description": "ID of the object to fetch", + "required": false, + "schema": { + "type": "array", + "items": { + "type": "string" + } + }, + "style": "form", + "explode": true +} +``` + +```yaml +name: id +in: query +description: ID of the object to fetch +required: false +schema: + type: array + items: + type: string +style: form +explode: true +``` + +A free-form query parameter, allowing undefined parameters of a specific type: +```json +{ + "in": "query", + "name": "freeForm", + "schema": { + "type": "object", + "additionalProperties": { + "type": "integer" + }, + }, + "style": "form" +} +``` + +```yaml +in: query +name: freeForm +schema: + type: object + additionalProperties: + type: integer +style: form +``` + +A complex parameter using `content` to define serialization: + +```json +{ + "in": "query", + "name": "coordinates", + "content": { + "application/json": { + "schema": { + "type": "object", + "required": [ + "lat", + "long" + ], + "properties": { + "lat": { + "type": "number" + }, + "long": { + "type": "number" + } + } + } + } + } +} +``` + +```yaml +in: query +name: coordinates +content: + application/json: + schema: + type: object + required: + - lat + - long + properties: + lat: + type: number + long: + type: number +``` + +#### Request Body Object + +Describes a single request body. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +description | `string` | A brief description of the request body. This could contain examples of use. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +content | Map[`string`, [Media Type Object](#mediaTypeObject)] | **REQUIRED**. The content of the request body. The key is a media type or [media type range](https://tools.ietf.org/html/rfc7231#appendix-D) and the value describes it. For requests that match multiple keys, only the most specific key is applicable. e.g. text/plain overrides text/* +required | `boolean` | Determines if the request body is required in the request. Defaults to `false`. + + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Request Body Examples + +A request body with a referenced model definition. 
+```json +{ + "description": "user to add to the system", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/User" + }, + "examples": { + "user" : { + "summary": "User Example", + "externalValue": "http://foo.bar/examples/user-example.json" + } + } + }, + "application/xml": { + "schema": { + "$ref": "#/components/schemas/User" + }, + "examples": { + "user" : { + "summary": "User example in XML", + "externalValue": "http://foo.bar/examples/user-example.xml" + } + } + }, + "text/plain": { + "examples": { + "user" : { + "summary": "User example in Plain text", + "externalValue": "http://foo.bar/examples/user-example.txt" + } + } + }, + "*/*": { + "examples": { + "user" : { + "summary": "User example in other format", + "externalValue": "http://foo.bar/examples/user-example.whatever" + } + } + } + } +} +``` + +```yaml +description: user to add to the system +content: + 'application/json': + schema: + $ref: '#/components/schemas/User' + examples: + user: + summary: User Example + externalValue: 'http://foo.bar/examples/user-example.json' + 'application/xml': + schema: + $ref: '#/components/schemas/User' + examples: + user: + summary: User Example in XML + externalValue: 'http://foo.bar/examples/user-example.xml' + 'text/plain': + examples: + user: + summary: User example in text plain format + externalValue: 'http://foo.bar/examples/user-example.txt' + '*/*': + examples: + user: + summary: User example in other format + externalValue: 'http://foo.bar/examples/user-example.whatever' +``` + +A body parameter that is an array of string values: +```json +{ + "description": "user to add to the system", + "content": { + "text/plain": { + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + } + } +} +``` + +```yaml +description: user to add to the system +required: true +content: + text/plain: + schema: + type: array + items: + type: string +``` + + +#### Media Type Object +Each Media Type Object provides schema and examples for the media type identified by its key. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +schema | [Schema Object](#schemaObject) \| [Reference Object](#referenceObject) | The schema defining the type used for the request body. +example | Any | Example of the media type. The example object SHOULD be in the correct format as specified by the media type. The `example` object is mutually exclusive of the `examples` object. Furthermore, if referencing a `schema` which contains an example, the `example` value SHALL _override_ the example provided by the schema. +examples | Map[ `string`, [Example Object](#exampleObject) \| [Reference Object](#referenceObject)] | Examples of the media type. Each example object SHOULD match the media type and specified schema if present. The `examples` object is mutually exclusive of the `example` object. Furthermore, if referencing a `schema` which contains an example, the `examples` value SHALL _override_ the example provided by the schema. +encoding | Map[`string`, [Encoding Object](#encodingObject)] | A map between a property name and its encoding information. The key, being the property name, MUST exist in the schema as a property. The encoding object SHALL only apply to `requestBody` objects when the media type is `multipart` or `application/x-www-form-urlencoded`. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). 
+ +##### Media Type Examples + +```js +{ + "application/json": { + "schema": { + "$ref": "#/components/schemas/Pet" + }, + "examples": { + "cat" : { + "summary": "An example of a cat", + "value": + { + "name": "Fluffy", + "petType": "Cat", + "color": "White", + "gender": "male", + "breed": "Persian" + } + }, + "dog": { + "summary": "An example of a dog with a cat's name", + "value" : { + "name": "Puma", + "petType": "Dog", + "color": "Black", + "gender": "Female", + "breed": "Mixed" + }, + "frog": { + "$ref": "#/components/examples/frog-example" + } + } + } + } +} +``` + +```yaml +application/json: + schema: + $ref: "#/components/schemas/Pet" + examples: + cat: + summary: An example of a cat + value: + name: Fluffy + petType: Cat + color: White + gender: male + breed: Persian + dog: + summary: An example of a dog with a cat's name + value: + name: Puma + petType: Dog + color: Black + gender: Female + breed: Mixed + frog: + $ref: "#/components/examples/frog-example" +``` + +##### Considerations for File Uploads + +In contrast with the 2.0 specification, `file` input/output content in OpenAPI is described with the same semantics as any other schema type. Specifically: + +```yaml +# content transferred with base64 encoding +schema: + type: string + format: base64 +``` + +```yaml +# content transferred in binary (octet-stream): +schema: + type: string + format: binary +``` + +These examples apply to either input payloads of file uploads or response payloads. + +A `requestBody` for submitting a file in a `POST` operation may look like the following example: + +```yaml +requestBody: + content: + application/octet-stream: + # any media type is accepted, functionally equivalent to `*/*` + schema: + # a binary file of any type + type: string + format: binary +``` + +In addition, specific media types MAY be specified: + +```yaml +# multiple, specific media types may be specified: +requestBody: + content: + # a binary file of type png or jpeg + 'image/jpeg': + schema: + type: string + format: binary + 'image/png': + schema: + type: string + format: binary +``` + +To upload multiple files, a `multipart` media type MUST be used: + +```yaml +requestBody: + content: + multipart/form-data: + schema: + properties: + # The property name 'file' will be used for all files. + file: + type: array + items: + type: string + format: binary + +``` + +##### Support for x-www-form-urlencoded Request Bodies + +To submit content using form url encoding via [RFC1866](https://tools.ietf.org/html/rfc1866), the following +definition may be used: + +```yaml +requestBody: + content: + application/x-www-form-urlencoded: + schema: + type: object + properties: + id: + type: string + format: uuid + address: + # complex types are stringified to support RFC 1866 + type: object + properties: {} +``` + +In this example, the contents in the `requestBody` MUST be stringified per [RFC1866](https://tools.ietf.org/html/rfc1866/) when passed to the server. In addition, the `address` field complex object will be stringified. + +When passing complex objects in the `application/x-www-form-urlencoded` content type, the default serialization strategy of such properties is described in the [`Encoding Object`](#encodingObject)'s [`style`](#encodingStyle) property as `form`. + +##### Special Considerations for `multipart` Content + +It is common to use `multipart/form-data` as a `Content-Type` when transferring request bodies to operations. 
In contrast to 2.0, a `schema` is REQUIRED to define the input parameters to the operation when using `multipart` content. This supports complex structures as well as supporting mechanisms for multiple file uploads.
+
+When passing in `multipart` types, boundaries MAY be used to separate sections of the content being transferred — thus, the following default `Content-Type`s are defined for `multipart`:
+
+* If the property is a primitive, or an array of primitive values, the default Content-Type is `text/plain`
+* If the property is complex, or an array of complex values, the default Content-Type is `application/json`
+* If the property is a `type: string` with `format: binary` or `format: base64` (aka a file object), the default Content-Type is `application/octet-stream`
+
+
+Examples:
+
+```yaml
+requestBody:
+  content:
+    multipart/form-data:
+      schema:
+        type: object
+        properties:
+          id:
+            type: string
+            format: uuid
+          address:
+            # default Content-Type for objects is `application/json`
+            type: object
+            properties: {}
+          profileImage:
+            # default Content-Type for string/binary is `application/octet-stream`
+            type: string
+            format: binary
+          children:
+            # default Content-Type for arrays is based on the `inner` type (text/plain here)
+            type: array
+            items:
+              type: string
+          addresses:
+            # default Content-Type for arrays is based on the `inner` type (object shown, so `application/json` in this example)
+            type: array
+            items:
+              $ref: '#/components/schemas/Address'
+```
+
+An `encoding` attribute is introduced to give you control over the serialization of parts of `multipart` request bodies. This attribute is _only_ applicable to `multipart` and `application/x-www-form-urlencoded` request bodies.
+
+#### Encoding Object
+
+A single encoding definition applied to a single schema property.
+
+##### Fixed Fields
+Field Name | Type | Description
+---|:---:|---
+contentType | `string` | The Content-Type for encoding a specific property. Default value depends on the property type: for `string` with `format` being `binary` – `application/octet-stream`; for other primitive types – `text/plain`; for `object` - `application/json`; for `array` – the default is defined based on the inner type. The value can be a specific media type (e.g. `application/json`), a wildcard media type (e.g. `image/*`), or a comma-separated list of the two types.
+headers | Map[`string`, [Header Object](#headerObject) \| [Reference Object](#referenceObject)] | A map allowing additional information to be provided as headers, for example `Content-Disposition`. `Content-Type` is described separately and SHALL be ignored in this section. This property SHALL be ignored if the request body media type is not a `multipart`.
+style | `string` | Describes how a specific property value will be serialized depending on its type. See [Parameter Object](#parameterObject) for details on the [`style`](#parameterStyle) property. The behavior follows the same values as `query` parameters, including default values. This property SHALL be ignored if the request body media type is not `application/x-www-form-urlencoded`.
+explode | `boolean` | When this is true, property values of type `array` or `object` generate separate parameters for each value of the array, or key-value-pair of the map. For other types of properties this property has no effect. When [`style`](#encodingStyle) is `form`, the default value is `true`. For all other styles, the default value is `false`. 
This property SHALL be ignored if the request body media type is not `application/x-www-form-urlencoded`. +allowReserved | `boolean` | Determines whether the parameter value SHOULD allow reserved characters, as defined by [RFC3986](https://tools.ietf.org/html/rfc3986#section-2.2) `:/?#[]@!$&'()*+,;=` to be included without percent-encoding. The default value is `false`. This property SHALL be ignored if the request body media type is not `application/x-www-form-urlencoded`. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Encoding Object Example + +```yaml +requestBody: + content: + multipart/mixed: + schema: + type: object + properties: + id: + # default is text/plain + type: string + format: uuid + address: + # default is application/json + type: object + properties: {} + historyMetadata: + # need to declare XML format! + description: metadata in XML format + type: object + properties: {} + profileImage: + # default is application/octet-stream, need to declare an image type only! + type: string + format: binary + encoding: + historyMetadata: + # require XML Content-Type in utf-8 encoding + contentType: application/xml; charset=utf-8 + profileImage: + # only accept png/jpeg + contentType: image/png, image/jpeg + headers: + X-Rate-Limit-Limit: + description: The number of allowed requests in the current period + schema: + type: integer +``` + +#### Responses Object + +A container for the expected responses of an operation. +The container maps a HTTP response code to the expected response. + +The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. +However, documentation is expected to cover a successful operation response and any known errors. + +The `default` MAY be used as a default response object for all HTTP codes +that are not covered individually by the specification. + +The `Responses Object` MUST contain at least one response code, and it +SHOULD be the response for a successful operation call. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +default | [Response Object](#responseObject) \| [Reference Object](#referenceObject) | The documentation of responses other than the ones declared for specific HTTP response codes. Use this field to cover undeclared responses. A [Reference Object](#referenceObject) can link to a response that the [OpenAPI Object's components/responses](#componentsResponses) section defines. + +##### Patterned Fields +Field Pattern | Type | Description +---|:---:|--- +[HTTP Status Code](#httpCodes) | [Response Object](#responseObject) \| [Reference Object](#referenceObject) | Any [HTTP status code](#httpCodes) can be used as the property name, but only one property per code, to describe the expected response for that HTTP status code. A [Reference Object](#referenceObject) can link to a response that is defined in the [OpenAPI Object's components/responses](#componentsResponses) section. This field MUST be enclosed in quotation marks (for example, "200") for compatibility between JSON and YAML. To define a range of response codes, this field MAY contain the uppercase wildcard character `X`. For example, `2XX` represents all response codes between `[200-299]`. The following range definitions are allowed: `1XX`, `2XX`, `3XX`, `4XX`, and `5XX`. If a response range is defined using an explicit code, the explicit code definition takes precedence over the range definition for that code. 
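+
+For example (a non-normative sketch; the response descriptions are illustrative only), a Responses Object can combine an explicit code with a range, in which case the explicit `404` definition takes precedence over `4XX` for that code:
+
+```yaml
+# Illustrative sketch only; descriptions are hypothetical.
+'200':
+  description: Successful operation.
+'404':
+  description: The requested resource was not found.
+'4XX':
+  description: Any other client error.
+default:
+  description: Unexpected error.
+```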
+ + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Responses Object Example + +A 200 response for a successful operation and a default response for others (implying an error): + +```json +{ + "200": { + "description": "a pet to be returned", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Pet" + } + } + } + }, + "default": { + "description": "Unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorModel" + } + } + } + } +} +``` + +```yaml +'200': + description: a pet to be returned + content: + application/json: + schema: + $ref: '#/components/schemas/Pet' +default: + description: Unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/ErrorModel' +``` + +#### Response Object +Describes a single response from an API Operation, including design-time, static +`links` to operations based on the response. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +description | `string` | **REQUIRED**. A short description of the response. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +headers | Map[`string`, [Header Object](#headerObject) \| [Reference Object](#referenceObject)] | Maps a header name to its definition. [RFC7230](https://tools.ietf.org/html/rfc7230#page-22) states header names are case insensitive. If a response header is defined with the name `"Content-Type"`, it SHALL be ignored. +content | Map[`string`, [Media Type Object](#mediaTypeObject)] | A map containing descriptions of potential response payloads. The key is a media type or [media type range](https://tools.ietf.org/html/rfc7231#appendix-D) and the value describes it. For responses that match multiple keys, only the most specific key is applicable. e.g. text/plain overrides text/* +links | Map[`string`, [Link Object](#linkObject) \| [Reference Object](#referenceObject)] | A map of operations links that can be followed from the response. The key of the map is a short name for the link, following the naming constraints of the names for [Component Objects](#componentsObject). + +This object MAY be extended with [Specification Extensions](#specificationExtensions). 
+
+##### Response Object Examples
+
+Response of an array of a complex type:
+
+```json
+{
+  "description": "A complex object array response",
+  "content": {
+    "application/json": {
+      "schema": {
+        "type": "array",
+        "items": {
+          "$ref": "#/components/schemas/VeryComplexType"
+        }
+      }
+    }
+  }
+}
+```
+
+```yaml
+description: A complex object array response
+content:
+  application/json:
+    schema:
+      type: array
+      items:
+        $ref: '#/components/schemas/VeryComplexType'
+```
+
+Response with a string type:
+
+```json
+{
+  "description": "A simple string response",
+  "content": {
+    "text/plain": {
+      "schema": {
+        "type": "string"
+      }
+    }
+  }
+}
+```
+
+```yaml
+description: A simple string response
+content:
+  text/plain:
+    schema:
+      type: string
+```
+
+Plain text response with headers:
+
+```json
+{
+  "description": "A simple string response",
+  "content": {
+    "text/plain": {
+      "schema": {
+        "type": "string"
+      }
+    }
+  },
+  "headers": {
+    "X-Rate-Limit-Limit": {
+      "description": "The number of allowed requests in the current period",
+      "schema": {
+        "type": "integer"
+      }
+    },
+    "X-Rate-Limit-Remaining": {
+      "description": "The number of remaining requests in the current period",
+      "schema": {
+        "type": "integer"
+      }
+    },
+    "X-Rate-Limit-Reset": {
+      "description": "The number of seconds left in the current period",
+      "schema": {
+        "type": "integer"
+      }
+    }
+  }
+}
+```
+
+```yaml
+description: A simple string response
+content:
+  text/plain:
+    schema:
+      type: string
+    example: 'whoa!'
+headers:
+  X-Rate-Limit-Limit:
+    description: The number of allowed requests in the current period
+    schema:
+      type: integer
+  X-Rate-Limit-Remaining:
+    description: The number of remaining requests in the current period
+    schema:
+      type: integer
+  X-Rate-Limit-Reset:
+    description: The number of seconds left in the current period
+    schema:
+      type: integer
+```
+
+Response with no return value:
+
+```json
+{
+  "description": "object created"
+}
+```
+
+```yaml
+description: object created
+```
+
+#### Callback Object
+
+A map of possible out-of-band callbacks related to the parent operation.
+Each value in the map is a [Path Item Object](#pathItemObject) that describes a set of requests that may be initiated by the API provider and the expected responses.
+The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation.
+
+##### Patterned Fields
+Field Pattern | Type | Description
+---|:---:|---
+{expression} | [Path Item Object](#pathItemObject) | A Path Item Object used to define a callback request and expected responses. A [complete example](../examples/v3.0/callback-example.yaml) is available.
+
+This object MAY be extended with [Specification Extensions](#specificationExtensions).
+
+##### Key Expression
+
+The key that identifies the [Path Item Object](#pathItemObject) is a [runtime expression](#runtimeExpression) that can be evaluated in the context of a runtime HTTP request/response to identify the URL to be used for the callback request.
+A simple example might be `$request.body#/url`.
+However, using a [runtime expression](#runtimeExpression) the complete HTTP message can be accessed.
+This includes accessing any part of a body that a JSON Pointer [RFC6901](https://tools.ietf.org/html/rfc6901) can reference.
+
+For example, given the following HTTP request:
+
+```http
+POST /subscribe/myevent?queryUrl=http://clientdomain.com/stillrunning HTTP/1.1
+Host: example.org
+Content-Type: application/json
+Content-Length: 187
+
+{
+  "failedUrl" : "http://clientdomain.com/failed",
+  "successUrls" : [
+    "http://clientdomain.com/fast",
+    "http://clientdomain.com/medium",
+    "http://clientdomain.com/slow"
+  ]
+}
+
+201 Created
+Location: http://example.org/subscription/1
+```
+
+The following examples show how the various expressions evaluate, assuming the callback operation has a path parameter named `eventType` and a query parameter named `queryUrl`.
+
+Expression | Value
+---|:---
+$url | http://example.org/subscribe/myevent?queryUrl=http://clientdomain.com/stillrunning
+$method | POST
+$request.path.eventType | myevent
+$request.query.queryUrl | http://clientdomain.com/stillrunning
+$request.header.content-Type | application/json
+$request.body#/failedUrl | http://clientdomain.com/failed
+$request.body#/successUrls/2 | http://clientdomain.com/medium
+$response.header.Location | http://example.org/subscription/1
+
+
+##### Callback Object Example
+
+The following example shows a callback to the URL specified by the `id` and `email` property in the request body.
+
+```yaml
+myWebhook:
+  'http://notificationServer.com?transactionId={$request.body#/id}&email={$request.body#/email}':
+    post:
+      requestBody:
+        description: Callback payload
+        content:
+          'application/json':
+            schema:
+              $ref: '#/components/schemas/SomePayload'
+      responses:
+        '200':
+          description: webhook successfully processed and no retries will be performed
+```
+
+
+#### Example Object
+
+##### Fixed Fields
+Field Name | Type | Description
+---|:---:|---
+summary | `string` | Short description for the example.
+description | `string` | Long description for the example. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation.
+value | Any | Embedded literal example. The `value` field and `externalValue` field are mutually exclusive. To represent examples of media types that cannot naturally be represented in JSON or YAML, use a string value to contain the example, escaping where necessary.
+externalValue | `string` | A URL that points to the literal example. This provides the capability to reference examples that cannot easily be included in JSON or YAML documents. The `value` field and `externalValue` field are mutually exclusive.
+
+This object MAY be extended with [Specification Extensions](#specificationExtensions).
+
+In all cases, the example value is expected to be compatible with the type schema
+of its associated value. Tooling implementations MAY choose to
+validate compatibility automatically, and reject the example value(s) if incompatible.
+ +##### Example Object Example + +```yaml +# in a model +schemas: + properties: + name: + type: string + examples: + name: + $ref: http://example.org/petapi-examples/openapi.json#/components/examples/name-example + +# in a request body: + requestBody: + content: + 'application/json': + schema: + $ref: '#/components/schemas/Address' + examples: + foo: + summary: A foo example + value: {"foo": "bar"} + bar: + summary: A bar example + value: {"bar": "baz"} + 'application/xml': + examples: + xmlExample: + summary: This is an example in XML + externalValue: 'http://example.org/examples/address-example.xml' + 'text/plain': + examples: + textExample: + summary: This is a text example + externalValue: 'http://foo.bar/examples/address-example.txt' + + +# in a parameter + parameters: + - name: 'zipCode' + in: 'query' + schema: + type: 'string' + format: 'zip-code' + examples: + zip-example: + $ref: '#/components/examples/zip-example' + +# in a response + responses: + '200': + description: your car appointment has been booked + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + examples: + confirmation-success: + $ref: '#/components/examples/confirmation-success' +``` + + +#### Link Object + +The `Link object` represents a possible design-time link for a response. +The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. + +Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. + +For computing links, and providing instructions to execute them, a [runtime expression](#runtimeExpression) is used for accessing values in an operation and using them as parameters while invoking the linked operation. + +##### Fixed Fields + +Field Name | Type | Description +---|:---:|--- +operationRef | `string` | A relative or absolute reference to an OAS operation. This field is mutually exclusive of the `operationId` field, and MUST point to an [Operation Object](#operationObject). Relative `operationRef` values MAY be used to locate an existing [Operation Object](#operationObject) in the OpenAPI definition. +operationId | `string` | The name of an _existing_, resolvable OAS operation, as defined with a unique `operationId`. This field is mutually exclusive of the `operationRef` field. +parameters | Map[`string`, Any \| [{expression}](#runtimeExpression)] | A map representing parameters to pass to an operation as specified with `operationId` or identified via `operationRef`. The key is the parameter name to be used, whereas the value can be a constant or an expression to be evaluated and passed to the linked operation. The parameter name can be qualified using the [parameter location](#parameterIn) `[{in}.]{name}` for operations that use the same parameter name in different locations (e.g. path.id). +requestBody | Any \| [{expression}](#runtimeExpression) | A literal value or [{expression}](#runtimeExpression) to use as a request body when calling the target operation. +description | `string` | A description of the link. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +server | [Server Object](#serverObject) | A server object to be used by the target operation. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). 
+ +A linked operation MUST be identified using either an `operationRef` or `operationId`. +In the case of an `operationId`, it MUST be unique and resolved in the scope of the OAS document. +Because of the potential for name clashes, the `operationRef` syntax is preferred +for specifications with external references. + +##### Examples + +Computing a link from a request operation where the `$request.path.id` is used to pass a request parameter to the linked operation. + +```yaml +paths: + /users/{id}: + parameters: + - name: id + in: path + required: true + description: the user identifier, as userId + schema: + type: string + get: + responses: + '200': + description: the user being returned + content: + application/json: + schema: + type: object + properties: + uuid: # the unique user id + type: string + format: uuid + links: + address: + # the target link operationId + operationId: getUserAddress + parameters: + # get the `id` field from the request path parameter named `id` + userId: $request.path.id + # the path item of the linked operation + /users/{userid}/address: + parameters: + - name: userid + in: path + required: true + description: the user identifier, as userId + schema: + type: string + # linked operation + get: + operationId: getUserAddress + responses: + '200': + description: the user's address +``` + +When a runtime expression fails to evaluate, no parameter value is passed to the target operation. + +Values from the response body can be used to drive a linked operation. + +```yaml +links: + address: + operationId: getUserAddressByUUID + parameters: + # get the `id` field from the request path parameter named `id` + userUuid: $response.body#/uuid +``` + +Clients follow all links at their discretion. +Neither permissions, nor the capability to make a successful call to that link, is guaranteed +solely by the existence of a relationship. + + +##### OperationRef Examples + +As references to `operationId` MAY NOT be possible (the `operationId` is an optional +value), references MAY also be made through a relative `operationRef`: + +```yaml +links: + UserRepositories: + # returns array of '#/components/schemas/repository' + operationRef: '#/paths/~12.0~1repositories~1{username}/get' + parameters: + username: $response.body#/username +``` + +or an absolute `operationRef`: + +```yaml +links: + UserRepositories: + # returns array of '#/components/schemas/repository' + operationRef: 'https://na2.gigantic-server.com/#/paths/~12.0~1repositories~1{username}/get' + parameters: + username: $response.body#/username +``` + +Note that in the use of `operationRef`, the _escaped forward-slash_ is necessary when +using JSON references. + + +##### Runtime Expressions + +Runtime expressions allow defining values based on information that will only be available within the HTTP message in an actual API call. +This mechanism is used by [Link Objects](#linkObject) and [Callback Objects](#callbackObject). + +The runtime expression is defined by the following [ABNF](https://tools.ietf.org/html/rfc5234) syntax + +``` + expression = ( "$url" | "$method" | "$statusCode" | "$request." source | "$response." source ) + source = ( header-reference | query-reference | path-reference | body-reference ) + header-reference = "header." token + query-reference = "query." name + path-reference = "path." 
name + body-reference = "body" ["#" fragment] + fragment = a JSON Pointer [RFC 6901](https://tools.ietf.org/html/rfc6901) + name = *( char ) + char = as per RFC [7159](https://tools.ietf.org/html/rfc7159#section-7) + token = as per RFC [7230](https://tools.ietf.org/html/rfc7230#section-3.2.6) +``` + +The `name` identifier is case-sensitive, whereas `token` is not. + +The table below provides examples of runtime expressions and examples of their use in a value: + +##### Examples + +Source Location | example expression | notes +---|:---|:---| +HTTP Method | `$method` | The allowable values for the `$method` will be those for the HTTP operation. +Requested media type | `$request.header.accept` | +Request parameter | `$request.path.id` | Request parameters MUST be declared in the `parameters` section of the parent operation or they cannot be evaluated. This includes request headers. +Request body property | `$request.body#/user/uuid` | In operations which accept payloads, references may be made to portions of the `requestBody` or the entire body. +Request URL | `$url` | +Response value | `$response.body#/status` | In operations which return payloads, references may be made to portions of the response body or the entire body. +Response header | `$response.header.Server` | Single header values only are available + +Runtime expressions preserve the type of the referenced value. +Expressions can be embedded into string values by surrounding the expression with `{}` curly braces. + +#### Header Object + +The Header Object follows the structure of the [Parameter Object](#parameterObject) with the following changes: + +1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. +1. `in` MUST NOT be specified, it is implicitly in `header`. +1. All traits that are affected by the location MUST be applicable to a location of `header` (for example, [`style`](#parameterStyle)). + +##### Header Object Example + +A simple header of type `integer`: + +```json +{ + "description": "The number of allowed requests in the current period", + "schema": { + "type": "integer" + } +} +``` + +```yaml +description: The number of allowed requests in the current period +schema: + type: integer +``` + +#### Tag Object + +Adds metadata to a single tag that is used by the [Operation Object](#operationObject). +It is not mandatory to have a Tag Object per tag defined in the Operation Object instances. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +name | `string` | **REQUIRED**. The name of the tag. +description | `string` | A short description for the tag. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +externalDocs | [External Documentation Object](#externalDocumentationObject) | Additional external documentation for this tag. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Tag Object Example + +```json +{ + "name": "pet", + "description": "Pets operations" +} +``` + +```yaml +name: pet +description: Pets operations +``` + +#### Examples Object + +In an `example`, a JSON Reference MAY be used, with the +explicit restriction that examples having a JSON format with object named +`$ref` are not allowed. Therefore, that `example`, structurally, can be +either a string primitive or an object, similar to `additionalProperties`. + +In all cases, the payload is expected to be compatible with the type schema +for the associated value. 
Tooling implementations MAY choose to +validate compatibility automatically, and reject the example value(s) if they +are incompatible. + +```yaml +# in a model +schemas: + properties: + name: + type: string + example: + $ref: http://foo.bar#/examples/name-example + +# in a request body, note the plural `examples` + requestBody: + content: + 'application/json': + schema: + $ref: '#/components/schemas/Address' + examples: + foo: + value: {"foo": "bar"} + bar: + value: {"bar": "baz"} + 'application/xml': + examples: + xml: + externalValue: 'http://foo.bar/examples/address-example.xml' + 'text/plain': + examples: + text: + externalValue: 'http://foo.bar/examples/address-example.txt' + +# in a parameter + parameters: + - name: 'zipCode' + in: 'query' + schema: + type: 'string' + format: 'zip-code' + example: + $ref: 'http://foo.bar#/examples/zip-example' + +# in a response, note the singular `example`: + responses: + '200': + description: your car appointment has been booked + content: + application/json: + schema: + $ref: '#/components/schemas/SuccessResponse' + example: + $ref: http://foo.bar#/examples/address-example.json +``` + +#### Reference Object + +A simple object to allow referencing other components in the specification, internally and externally. + +The Reference Object is defined by [JSON Reference](https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03) and follows the same structure, behavior and rules. + +For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +$ref | `string` | **REQUIRED**. The reference string. + +This object cannot be extended with additional properties and any properties added SHALL be ignored. + +##### Reference Object Example + +```json +{ + "$ref": "#/components/schemas/Pet" +} +``` + +```yaml +$ref: '#/components/schemas/Pet' +``` + +##### Relative Schema Document Example +```json +{ + "$ref": "Pet.json" +} +``` + +```yaml +$ref: Pet.yaml +``` + +##### Relative Documents With Embedded Schema Example +```json +{ + "$ref": "definitions.json#/Pet" +} +``` + +```yaml +$ref: definitions.yaml#/Pet +``` + +#### Schema Object + +The Schema Object allows the definition of input and output data types. +These types can be objects, but also primitives and arrays. +This object is an extended subset of the [JSON Schema Specification Wright Draft 00](http://json-schema.org/). + +For more information about the properties, see [JSON Schema Core](https://tools.ietf.org/html/draft-wright-json-schema-00) and [JSON Schema Validation](https://tools.ietf.org/html/draft-wright-json-schema-validation-00). +Unless stated otherwise, the property definitions follow the JSON Schema. + +##### Properties + +The following properties are taken directly from the JSON Schema definition and follow the same specifications: + +- title +- multipleOf +- maximum +- exclusiveMaximum +- minimum +- exclusiveMinimum +- maxLength +- minLength +- pattern (This string SHOULD be a valid regular expression, according to the [ECMA 262 regular expression](https://www.ecma-international.org/ecma-262/5.1/#sec-7.8.5) dialect) +- maxItems +- minItems +- uniqueItems +- maxProperties +- minProperties +- required +- enum + +The following properties are taken from the JSON Schema definition but their definitions were adjusted to the OpenAPI Specification. +- type - Value MUST be a string. Multiple types via an array are not supported. 
+- allOf - Inline or referenced schema MUST be of a [Schema Object](#schemaObject) and not a standard JSON Schema. +- oneOf - Inline or referenced schema MUST be of a [Schema Object](#schemaObject) and not a standard JSON Schema. +- anyOf - Inline or referenced schema MUST be of a [Schema Object](#schemaObject) and not a standard JSON Schema. +- not - Inline or referenced schema MUST be of a [Schema Object](#schemaObject) and not a standard JSON Schema. +- items - Value MUST be an object and not an array. Inline or referenced schema MUST be of a [Schema Object](#schemaObject) and not a standard JSON Schema. `items` MUST be present if the `type` is `array`. +- properties - Property definitions MUST be a [Schema Object](#schemaObject) and not a standard JSON Schema (inline or referenced). +- additionalProperties - Value can be boolean or object. Inline or referenced schema MUST be of a [Schema Object](#schemaObject) and not a standard JSON Schema. +- description - [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +- format - See [Data Type Formats](#dataTypeFormat) for further details. While relying on JSON Schema's defined formats, the OAS offers a few additional predefined formats. +- default - The default value represents what would be assumed by the consumer of the input as the value of the schema if one is not provided. Unlike JSON Schema, the value MUST conform to the defined type for the Schema Object defined at the same level. For example, if `type` is `string`, then `default` can be `"foo"` but cannot be `1`. + +Alternatively, any time a Schema Object can be used, a [Reference Object](#referenceObject) can be used in its place. This allows referencing definitions instead of defining them inline. + +Additional properties defined by the JSON Schema specification that are not mentioned here are strictly unsupported. + +Other than the JSON Schema subset fields, the following fields MAY be used for further schema documentation: + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +nullable | `boolean` | Allows sending a `null` value for the defined schema. Default value is `false`. +discriminator | [Discriminator Object](#discriminatorObject) | Adds support for polymorphism. The discriminator is an object name that is used to differentiate between other schemas which may satisfy the payload description. See [Composition and Inheritance](#schemaComposition) for more details. +readOnly | `boolean` | Relevant only for Schema `"properties"` definitions. Declares the property as "read only". This means that it MAY be sent as part of a response but SHOULD NOT be sent as part of the request. If the property is marked as `readOnly` being `true` and is in the `required` list, the `required` will take effect on the response only. A property MUST NOT be marked as both `readOnly` and `writeOnly` being `true`. Default value is `false`. +writeOnly | `boolean` | Relevant only for Schema `"properties"` definitions. Declares the property as "write only". Therefore, it MAY be sent as part of a request but SHOULD NOT be sent as part of the response. If the property is marked as `writeOnly` being `true` and is in the `required` list, the `required` will take effect on the request only. A property MUST NOT be marked as both `readOnly` and `writeOnly` being `true`. Default value is `false`. +xml | [XML Object](#xmlObject) | This MAY be used only on properties schemas. It has no effect on root schemas. 
Adds additional metadata to describe the XML representation of this property.
+externalDocs | [External Documentation Object](#externalDocumentationObject) | Additional external documentation for this schema.
+example | Any | A free-form property to include an example of an instance for this schema. To represent examples that cannot be naturally represented in JSON or YAML, a string value can be used to contain the example with escaping where necessary.
+deprecated | `boolean` | Specifies that a schema is deprecated and SHOULD be transitioned out of usage. Default value is `false`.
+
+This object MAY be extended with [Specification Extensions](#specificationExtensions).
+
+###### Composition and Inheritance (Polymorphism)
+
+The OpenAPI Specification allows combining and extending model definitions using the `allOf` property of JSON Schema, in effect offering model composition.
+`allOf` takes an array of object definitions that are validated *independently* but together compose a single object.
+
+While composition offers model extensibility, it does not imply a hierarchy between the models.
+To support polymorphism, the OpenAPI Specification adds the `discriminator` field.
+When used, the `discriminator` will be the name of the property that decides which schema definition validates the structure of the model.
+As such, the `discriminator` field MUST be a required field.
+There are two ways to define the value of a discriminator for an inheriting instance.
+- Use the schema name.
+- Override the schema name by overriding the property with a new value. If a new value exists, this takes precedence over the schema name.
+As such, inline schema definitions, which do not have a given id, *cannot* be used in polymorphism.
+
+###### XML Modeling
+
+The [xml](#schemaXml) property allows extra definitions when translating the JSON definition to XML.
+The [XML Object](#xmlObject) contains additional information about the available options.
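+
+As a brief illustrative sketch (not one of the spec's own examples that follow), the `nullable`, `readOnly`, and `writeOnly` fields described above might be combined in a single object schema; here `id` is expected only in responses, `password` only in requests, `required` takes effect on each side accordingly, and `nickname` may be sent as `null`:
+
+```yaml
+type: object
+required:
+- id
+- password
+properties:
+  id:
+    type: integer
+    format: int64
+    readOnly: true
+  password:
+    type: string
+    writeOnly: true
+  nickname:
+    type: string
+    nullable: true
+```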
+ +##### Schema Object Examples + +###### Primitive Sample + +```json +{ + "type": "string", + "format": "email" +} +``` + +```yaml +type: string +format: email +``` + +###### Simple Model + +```json +{ + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "address": { + "$ref": "#/components/schemas/Address" + }, + "age": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } +} +``` + +```yaml +type: object +required: +- name +properties: + name: + type: string + address: + $ref: '#/components/schemas/Address' + age: + type: integer + format: int32 + minimum: 0 +``` + +###### Model with Map/Dictionary Properties + +For a simple string to string mapping: + +```json +{ + "type": "object", + "additionalProperties": { + "type": "string" + } +} +``` + +```yaml +type: object +additionalProperties: + type: string +``` + +For a string to model mapping: + +```json +{ + "type": "object", + "additionalProperties": { + "$ref": "#/components/schemas/ComplexModel" + } +} +``` + +```yaml +type: object +additionalProperties: + $ref: '#/components/schemas/ComplexModel' +``` + +###### Model with Example + +```json +{ + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + }, + "required": [ + "name" + ], + "example": { + "name": "Puma", + "id": 1 + } +} +``` + +```yaml +type: object +properties: + id: + type: integer + format: int64 + name: + type: string +required: +- name +example: + name: Puma + id: 1 +``` + +###### Models with Composition + +```json +{ + "components": { + "schemas": { + "ErrorModel": { + "type": "object", + "required": [ + "message", + "code" + ], + "properties": { + "message": { + "type": "string" + }, + "code": { + "type": "integer", + "minimum": 100, + "maximum": 600 + } + } + }, + "ExtendedErrorModel": { + "allOf": [ + { + "$ref": "#/components/schemas/ErrorModel" + }, + { + "type": "object", + "required": [ + "rootCause" + ], + "properties": { + "rootCause": { + "type": "string" + } + } + } + ] + } + } + } +} +``` + +```yaml +components: + schemas: + ErrorModel: + type: object + required: + - message + - code + properties: + message: + type: string + code: + type: integer + minimum: 100 + maximum: 600 + ExtendedErrorModel: + allOf: + - $ref: '#/components/schemas/ErrorModel' + - type: object + required: + - rootCause + properties: + rootCause: + type: string +``` + +###### Models with Polymorphism Support + +```json +{ + "components": { + "schemas": { + "Pet": { + "type": "object", + "discriminator": { + "propertyName": "petType" + }, + "properties": { + "name": { + "type": "string" + }, + "petType": { + "type": "string" + } + }, + "required": [ + "name", + "petType" + ] + }, + "Cat": { + "description": "A representation of a cat. Note that `Cat` will be used as the discriminator value.", + "allOf": [ + { + "$ref": "#/components/schemas/Pet" + }, + { + "type": "object", + "properties": { + "huntingSkill": { + "type": "string", + "description": "The measured skill for hunting", + "default": "lazy", + "enum": [ + "clueless", + "lazy", + "adventurous", + "aggressive" + ] + } + }, + "required": [ + "huntingSkill" + ] + } + ] + }, + "Dog": { + "description": "A representation of a dog. 
Note that `Dog` will be used as the discriminator value.", + "allOf": [ + { + "$ref": "#/components/schemas/Pet" + }, + { + "type": "object", + "properties": { + "packSize": { + "type": "integer", + "format": "int32", + "description": "the size of the pack the dog is from", + "default": 0, + "minimum": 0 + } + }, + "required": [ + "packSize" + ] + } + ] + } + } + } +} +``` + +```yaml +components: + schemas: + Pet: + type: object + discriminator: + propertyName: petType + properties: + name: + type: string + petType: + type: string + required: + - name + - petType + Cat: ## "Cat" will be used as the discriminator value + description: A representation of a cat + allOf: + - $ref: '#/components/schemas/Pet' + - type: object + properties: + huntingSkill: + type: string + description: The measured skill for hunting + enum: + - clueless + - lazy + - adventurous + - aggressive + required: + - huntingSkill + Dog: ## "Dog" will be used as the discriminator value + description: A representation of a dog + allOf: + - $ref: '#/components/schemas/Pet' + - type: object + properties: + packSize: + type: integer + format: int32 + description: the size of the pack the dog is from + default: 0 + minimum: 0 + required: + - packSize +``` + +#### Discriminator Object + +When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. + +When using the discriminator, _inline_ schemas will not be considered. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +propertyName | `string` | **REQUIRED**. The name of the property in the payload that will hold the discriminator value. + mapping | Map[`string`, `string`] | An object to hold mappings between payload values and schema names or references. + +The discriminator attribute is legal only when using one of the composite keywords `oneOf`, `anyOf`, `allOf`. + +In OAS 3.0, a response payload MAY be described to be exactly one of any number of types: + +``` +MyResponseType: + oneOf: + - $ref: '#/components/schemas/Cat' + - $ref: '#/components/schemas/Dog' + - $ref: '#/components/schemas/Lizard' +``` + +which means the payload _MUST_, by validation, match exactly one of the schemas described by `Cat`, `Dog`, or `Lizard`. In this case, a discriminator MAY act as a "hint" to shortcut validation and selection of the matching schema which may be a costly operation, depending on the complexity of the schema. We can then describe exactly which field tells us which schema to use: + + +``` +MyResponseType: + oneOf: + - $ref: '#/components/schemas/Cat' + - $ref: '#/components/schemas/Dog' + - $ref: '#/components/schemas/Lizard' + discriminator: + propertyName: pet_type +``` + +The expectation now is that a property with name `pet_type` _MUST_ be present in the response payload, and the value will correspond to the name of a schema defined in the OAS document. Thus the response payload: + +``` +{ + "id": 12345, + "pet_type": "Cat" +} +``` + +Will indicate that the `Cat` schema be used in conjunction with this payload. 
+ +In scenarios where the value of the discriminator field does not match the schema name or implicit mapping is not possible, an optional `mapping` definition MAY be used: + +``` +MyResponseType: + oneOf: + - $ref: '#/components/schemas/Cat' + - $ref: '#/components/schemas/Dog' + - $ref: '#/components/schemas/Lizard' + - $ref: 'https://gigantic-server.com/schemas/Monster/schema.json' + discriminator: + propertyName: pet_type + mapping: + dog: '#/components/schemas/Dog' + monster: 'https://gigantic-server.com/schemas/Monster/schema.json' +``` + +Here the discriminator _value_ of `dog` will map to the schema `#/components/schemas/Dog`, rather than the default (implicit) value of `Dog`. If the discriminator _value_ does not match an implicit or explicit mapping, no schema can be determined and validation SHOULD fail. Mapping keys MUST be string values, but tooling MAY convert response values to strings for comparison. + +When used in conjunction with the `anyOf` construct, the use of the discriminator can avoid ambiguity where multiple schemas may satisfy a single payload. + +In both the `oneOf` and `anyOf` use cases, all possible schemas MUST be listed explicitly. To avoid redundancy, the discriminator MAY be added to a parent schema definition, and all schemas comprising the parent schema in an `allOf` construct may be used as an alternate schema. + +For example: + +``` +components: + schemas: + Pet: + type: object + required: + - pet_type + properties: + pet_type: + type: string + discriminator: + propertyName: pet_type + mapping: + cachorro: Dog + Cat: + allOf: + - $ref: '#/components/schemas/Pet' + - type: object + # all other properties specific to a `Cat` + properties: + name: + type: string + Dog: + allOf: + - $ref: '#/components/schemas/Pet' + - type: object + # all other properties specific to a `Dog` + properties: + bark: + type: string + Lizard: + allOf: + - $ref: '#/components/schemas/Pet' + - type: object + # all other properties specific to a `Lizard` + properties: + lovesRocks: + type: boolean +``` + +a payload like this: + +``` +{ + "pet_type": "Cat", + "name": "misty" +} +``` + +will indicate that the `Cat` schema be used. Likewise this schema: + +``` +{ + "pet_type": "cachorro", + "bark": "soft" +} +``` + +will map to `Dog` because of the definition in the `mappings` element. + + +#### XML Object + +A metadata object that allows for more fine-tuned XML model definitions. + +When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. +See examples for expected behavior. + +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +name | `string` | Replaces the name of the element/attribute used for the described schema property. When defined within `items`, it will affect the name of the individual XML elements within the list. When defined alongside `type` being `array` (outside the `items`), it will affect the wrapping element and only if `wrapped` is `true`. If `wrapped` is `false`, it will be ignored. +namespace | `string` | The URI of the namespace definition. Value MUST be in the form of an absolute URI. +prefix | `string` | The prefix to be used for the [name](#xmlName). +attribute | `boolean` | Declares whether the property definition translates to an attribute instead of an element. Default value is `false`. +wrapped | `boolean` | MAY be used only for an array definition. Signifies whether the array is wrapped (for example, ``) or unwrapped (``). 
Default value is `false`. The definition takes effect only when defined alongside `type` being `array` (outside the `items`). + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### XML Object Examples + +The examples of the XML object definitions are included inside a property definition of a [Schema Object](#schemaObject) with a sample of the XML representation of it. + +###### No XML Element + +Basic string property: + +```json +{ + "animals": { + "type": "string" + } +} +``` + +```yaml +animals: + type: string +``` + +```xml +... +``` + +Basic string array property ([`wrapped`](#xmlWrapped) is `false` by default): + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string" + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string +``` + +```xml +... +... +... +``` + +###### XML Name Replacement + +```json +{ + "animals": { + "type": "string", + "xml": { + "name": "animal" + } + } +} +``` + +```yaml +animals: + type: string + xml: + name: animal +``` + +```xml +... +``` + + +###### XML Attribute, Prefix and Namespace + +In this example, a full model definition is shown. + +```json +{ + "Person": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int32", + "xml": { + "attribute": true + } + }, + "name": { + "type": "string", + "xml": { + "namespace": "http://example.com/schema/sample", + "prefix": "sample" + } + } + } + } +} +``` + +```yaml +Person: + type: object + properties: + id: + type: integer + format: int32 + xml: + attribute: true + name: + type: string + xml: + namespace: http://example.com/schema/sample + prefix: sample +``` + +```xml + + example + +``` + +###### XML Arrays + +Changing the element names: + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string", + "xml": { + "name": "animal" + } + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string + xml: + name: animal +``` + +```xml +value +value +``` + +The external `name` property has no effect on the XML: + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string", + "xml": { + "name": "animal" + } + }, + "xml": { + "name": "aliens" + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string + xml: + name: animal + xml: + name: aliens +``` + +```xml +value +value +``` + +Even when the array is wrapped, if a name is not explicitly defined, the same name will be used both internally and externally: + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string" + }, + "xml": { + "wrapped": true + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string + xml: + wrapped: true +``` + +```xml + + value + value + +``` + +To overcome the naming problem in the example above, the following definition can be used: + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string", + "xml": { + "name": "animal" + } + }, + "xml": { + "wrapped": true + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string + xml: + name: animal + xml: + wrapped: true +``` + +```xml + + value + value + +``` + +Affecting both internal and external names: + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string", + "xml": { + "name": "animal" + } + }, + "xml": { + "name": "aliens", + "wrapped": true + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string + xml: + name: animal + xml: + name: aliens + wrapped: true +``` + +```xml + + value + value + +``` + +If we 
change the external element but not the internal ones: + +```json +{ + "animals": { + "type": "array", + "items": { + "type": "string" + }, + "xml": { + "name": "aliens", + "wrapped": true + } + } +} +``` + +```yaml +animals: + type: array + items: + type: string + xml: + name: aliens + wrapped: true +``` + +```xml + + value + value + +``` + +#### Security Scheme Object + +Defines a security scheme that can be used by the operations. +Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in [RFC6749](https://tools.ietf.org/html/rfc6749), and [OpenID Connect Discovery](https://tools.ietf.org/html/draft-ietf-oauth-discovery-06). + +##### Fixed Fields +Field Name | Type | Applies To | Description +---|:---:|---|--- +type | `string` | Any | **REQUIRED**. The type of the security scheme. Valid values are `"apiKey"`, `"http"`, `"oauth2"`, `"openIdConnect"`. +description | `string` | Any | A short description for security scheme. [CommonMark syntax](http://spec.commonmark.org/) MAY be used for rich text representation. +name | `string` | `apiKey` | **REQUIRED**. The name of the header, query or cookie parameter to be used. +in | `string` | `apiKey` | **REQUIRED**. The location of the API key. Valid values are `"query"`, `"header"` or `"cookie"`. +scheme | `string` | `http` | **REQUIRED**. The name of the HTTP Authorization scheme to be used in the [Authorization header as defined in RFC7235](https://tools.ietf.org/html/rfc7235#section-5.1). +bearerFormat | `string` | `http` (`"bearer"`) | A hint to the client to identify how the bearer token is formatted. Bearer tokens are usually generated by an authorization server, so this information is primarily for documentation purposes. +flows | [OAuth Flows Object](#oauthFlowsObject) | `oauth2` | **REQUIRED**. An object containing configuration information for the flow types supported. +openIdConnectUrl | `string` | `openIdConnect` | **REQUIRED**. OpenId Connect URL to discover OAuth2 configuration values. This MUST be in the form of a URL. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### Security Scheme Object Example + +###### Basic Authentication Sample + +```json +{ + "type": "http", + "scheme": "basic" +} +``` + +```yaml +type: http +scheme: basic +``` + +###### API Key Sample + +```json +{ + "type": "apiKey", + "name": "api_key", + "in": "header" +} +``` + +```yaml +type: apiKey +name: api_key +in: header +``` + +###### JWT Bearer Sample + +```json +{ + "type": "http", + "scheme": "bearer", + "bearerFormat": "JWT", +} +``` + +```yaml +type: http +scheme: bearer +bearerFormat: JWT +``` + +###### Implicit OAuth2 Sample + +```json +{ + "type": "oauth2", + "flows": { + "implicit": { + "authorizationUrl": "https://example.com/api/oauth/dialog", + "scopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + } + } +} +``` + +```yaml +type: oauth2 +flows: + implicit: + authorizationUrl: https://example.com/api/oauth/dialog + scopes: + write:pets: modify pets in your account + read:pets: read your pets +``` + +#### OAuth Flows Object + +Allows configuration of the supported OAuth Flows. 
+ +##### Fixed Fields +Field Name | Type | Description +---|:---:|--- +implicit| [OAuth Flow Object](#oauthFlowObject) | Configuration for the OAuth Implicit flow +password| [OAuth Flow Object](#oauthFlowObject) | Configuration for the OAuth Resource Owner Password flow +clientCredentials| [OAuth Flow Object](#oauthFlowObject) | Configuration for the OAuth Client Credentials flow. Previously called `application` in OpenAPI 2.0. +authorizationCode| [OAuth Flow Object](#oauthFlowObject) | Configuration for the OAuth Authorization Code flow. Previously called `accessCode` in OpenAPI 2.0. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +#### OAuth Flow Object + +Configuration details for a supported OAuth Flow + +##### Fixed Fields +Field Name | Type | Applies To | Description +---|:---:|---|--- +authorizationUrl | `string` | `oauth2` (`"implicit"`, `"authorizationCode"`) | **REQUIRED**. The authorization URL to be used for this flow. This MUST be in the form of a URL. +tokenUrl | `string` | `oauth2` (`"password"`, `"clientCredentials"`, `"authorizationCode"`) | **REQUIRED**. The token URL to be used for this flow. This MUST be in the form of a URL. +refreshUrl | `string` | `oauth2` | The URL to be used for obtaining refresh tokens. This MUST be in the form of a URL. +scopes | Map[`string`, `string`] | `oauth2` | **REQUIRED**. The available scopes for the OAuth2 security scheme. A map between the scope name and a short description for it. + +This object MAY be extended with [Specification Extensions](#specificationExtensions). + +##### OAuth Flow Object Examples + +```JSON +{ + "type": "oauth2", + "flows": { + "implicit": { + "authorizationUrl": "https://example.com/api/oauth/dialog", + "scopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + }, + "authorizationCode": { + "authorizationUrl": "https://example.com/api/oauth/dialog", + "tokenUrl": "https://example.com/api/oauth/token", + "scopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + } + } +} +``` + +```YAML +type: oauth2 +flows: + implicit: + authorizationUrl: https://example.com/api/oauth/dialog + scopes: + write:pets: modify pets in your account + read:pets: read your pets + authorizationCode: + authorizationUrl: https://example.com/api/oauth/dialog + tokenUrl: https://example.com/api/oauth/token + scopes: + write:pets: modify pets in your account + read:pets: read your pets +``` + + +#### Security Requirement Object + +Lists the required security schemes to execute this operation. +The name used for each property MUST correspond to a security scheme declared in the [Security Schemes](#componentsSecuritySchemes) under the [Components Object](#componentsObject). + +Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. +This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. + +When a list of Security Requirement Objects is defined on the [Open API object](#oasObject) or [Operation Object](#operationObject), only one of Security Requirement Objects in the list needs to be satisfied to authorize the request. 
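+
+As an illustrative sketch (not one of the spec's own examples below), the following `security` list on an OpenAPI Object would authorize a request that satisfies either `api_key` alone, or both `petstore_auth` (with the listed scopes) and `api_key` together; the scheme names are assumed to be declared in the [Security Schemes](#componentsSecuritySchemes) under the [Components Object](#componentsObject):
+
+```yaml
+security:
+- api_key: []
+- petstore_auth:
+  - write:pets
+  - read:pets
+  api_key: []
+```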
+ +##### Patterned Fields + +Field Pattern | Type | Description +---|:---:|--- +{name} | [`string`] | Each name MUST correspond to a security scheme which is declared in the [Security Schemes](#componentsSecuritySchemes) under the [Components Object](#componentsObject). If the security scheme is of type `"oauth2"` or `"openIdConnect"`, then the value is a list of scope names required for the execution. For other security scheme types, the array MUST be empty. + +##### Security Requirement Object Examples + +###### Non-OAuth2 Security Requirement + +```json +{ + "api_key": [] +} +``` + +```yaml +api_key: [] +``` + +###### OAuth2 Security Requirement + +```json +{ + "petstore_auth": [ + "write:pets", + "read:pets" + ] +} +``` + +```yaml +petstore_auth: +- write:pets +- read:pets +``` + +### Specification Extensions + +While the OpenAPI Specification tries to accommodate most use cases, additional data can be added to extend the specification at certain points. + +The extensions properties are implemented as patterned fields that are always prefixed by `"x-"`. + +Field Pattern | Type | Description +---|:---:|--- +^x- | Any | Allows extensions to the OpenAPI Schema. The field name MUST begin with `x-`, for example, `x-internal-id`. The value can be `null`, a primitive, an array or an object. Can have any valid JSON format value. + +The extensions may or may not be supported by the available tooling, but those may be extended as well to add requested support (if tools are internal or open-sourced). + +### Security Filtering + +Some objects in the OpenAPI Specification MAY be declared and remain empty, or be completely removed, even though they are inherently the core of the API documentation. + +The reasoning is to allow an additional layer of access control over the documentation. +While not part of the specification itself, certain libraries MAY choose to allow access to parts of the documentation based on some form of authentication/authorization. + +Two examples of this: + +1. The [Paths Object](#pathsObject) MAY be empty. It may be counterintuitive, but this may tell the viewer that they got to the right place, but can't access any documentation. They'd still have access to the [Info Object](#infoObject) which may contain additional information regarding authentication. +2. The [Path Item Object](#pathItemObject) MAY be empty. In this case, the viewer will be aware that the path exists, but will not be able to see any of its operations or parameters. This is different than hiding the path itself from the [Paths Object](#pathsObject), so the user will not be aware of its existence. This allows the documentation provider to finely control what the viewer can see. + +## Appendix A: Revision History + +Version | Date | Notes +--- | --- | --- +3.0.0 | 2017-07-26 | Release of the OpenAPI Specification 3.0.0 +3.0.0-rc2 | 2017-06-16 | rc2 of the 3.0 specification +3.0.0-rc1 | 2017-04-27 | rc1 of the 3.0 specification +3.0.0-rc0 | 2017-02-28 | Implementer's Draft of the 3.0 specification +2.0 | 2015-12-31 | Donation of Swagger 2.0 to the Open API Initiative +2.0 | 2014-09-08 | Release of Swagger 2.0 +1.2 | 2014-03-14 | Initial release of the formal document. 
+1.1 | 2012-08-22 | Release of Swagger 1.1 +1.0 | 2011-08-10 | First release of the Swagger Specification diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/README.md b/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/README.md new file mode 100644 index 000000000..3987bd7b9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/README.md @@ -0,0 +1,15 @@ +# OpenAPI 3.0 Schema Generator + +This directory contains a support tool that reads (scrapes) the +Markdown text specification for OpenAPI 3.0 and builds a +corresponding JSON schema. + +It also contains "3.0.md", a local copy of the OpenAPI specification +with modifications that fix minor inconsistencies and make it easier +to read. We hope to have these changes merged into the official +document. + +## Disclaimer + +This does not generate the official OpenAPI 3.0 JSON Schema, which +at the time of this commit, does not exist. diff --git a/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/main.go b/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/main.go new file mode 100644 index 000000000..91459310a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/OpenAPIv3/schema-generator/main.go @@ -0,0 +1,846 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// schema-generator is a support tool that generates the OpenAPI v3 JSON schema. +// Yes, it's gross, but the OpenAPI 3.0 spec, which defines REST APIs with a +// rigorous JSON schema, is itself defined with a Markdown file. Ironic? +package main + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "log" + "os" + "regexp" + "sort" + "strings" + "unicode" + "unicode/utf8" + + "github.com/googleapis/gnostic/jsonschema" +) + +// convert the first character of a string to lower case +func lowerFirst(s string) string { + if s == "" { + return "" + } + r, n := utf8.DecodeRuneInString(s) + return string(unicode.ToLower(r)) + s[n:] +} + +// Section models a section of the OpenAPI specification text document. +type Section struct { + Level int + Text string + Title string + Children []*Section +} + +// ReadSection reads a section of the OpenAPI Specification, recursively dividing it into subsections +func ReadSection(text string, level int) (section *Section) { + titlePattern := regexp.MustCompile("^" + strings.Repeat("#", level) + " .*$") + subtitlePattern := regexp.MustCompile("^" + strings.Repeat("#", level+1) + " .*$") + + section = &Section{Level: level, Text: text} + lines := strings.Split(string(text), "\n") + subsection := "" + for i, line := range lines { + if i == 0 && titlePattern.Match([]byte(line)) { + section.Title = line + } else if subtitlePattern.Match([]byte(line)) { + // we've found a subsection title. 
+ // if there's a subsection that we've already been reading, save it + if len(subsection) != 0 { + child := ReadSection(subsection, level+1) + section.Children = append(section.Children, child) + } + // start a new subsection + subsection = line + "\n" + } else { + // add to the subsection we've been reading + subsection += line + "\n" + } + } + // if this section has subsections, save the last one + if len(section.Children) > 0 { + child := ReadSection(subsection, level+1) + section.Children = append(section.Children, child) + } + return +} + +// Display recursively displays a section of the specification. +func (s *Section) Display(section string) { + if len(s.Children) == 0 { + //fmt.Printf("%s\n", s.Text) + } else { + for i, child := range s.Children { + var subsection string + if section == "" { + subsection = fmt.Sprintf("%d", i) + } else { + subsection = fmt.Sprintf("%s.%d", section, i) + } + fmt.Printf("%-12s %s\n", subsection, child.NiceTitle()) + child.Display(subsection) + } + } +} + +// remove a link from a string, leaving only the text that follows it +// if there is no link, just return the string +func stripLink(input string) (output string) { + stringPattern := regexp.MustCompile("^(.*)$") + stringWithLinkPattern := regexp.MustCompile("^(.*)$") + if matches := stringWithLinkPattern.FindSubmatch([]byte(input)); matches != nil { + return string(matches[1]) + } else if matches := stringPattern.FindSubmatch([]byte(input)); matches != nil { + return string(matches[1]) + } else { + return input + } +} + +// NiceTitle returns a nice-to-display title for a section by removing the opening "###" and any links. +func (s *Section) NiceTitle() string { + titlePattern := regexp.MustCompile("^#+ (.*)$") + titleWithLinkPattern := regexp.MustCompile("^#+ (.*)$") + if matches := titleWithLinkPattern.FindSubmatch([]byte(s.Title)); matches != nil { + return string(matches[1]) + } else if matches := titlePattern.FindSubmatch([]byte(s.Title)); matches != nil { + return string(matches[1]) + } else { + return "" + } +} + +// replace markdown links with their link text (removing the URL part) +func removeMarkdownLinks(input string) (output string) { + markdownLink := regexp.MustCompile("\\[([^\\]\\[]*)\\]\\(([^\\)]*)\\)") // matches [link title](link url) + output = string(markdownLink.ReplaceAll([]byte(input), []byte("$1"))) + return +} + +// extract the fixed fields from a table in a section +func parseFixedFields(input string, schemaObject *SchemaObject) { + lines := strings.Split(input, "\n") + for _, line := range lines { + + // replace escaped bars with "OR", assuming these are used to describe union types + line = strings.Replace(line, " \\| ", " OR ", -1) + + // split the table on the remaining bars + parts := strings.Split(line, "|") + if len(parts) > 1 { + fieldName := strings.Trim(stripLink(parts[0]), " ") + if fieldName != "Field Name" && fieldName != "---" { + + if len(parts) == 3 || len(parts) == 4 { + // this is what we expect + } else { + log.Printf("ERROR: %+v", parts) + } + + typeName := parts[1] + typeName = strings.Replace(typeName, "{expression}", "Expression", -1) + typeName = strings.Trim(typeName, " ") + typeName = strings.Replace(typeName, "`", "", -1) + typeName = removeMarkdownLinks(typeName) + typeName = strings.Replace(typeName, " ", "", -1) + typeName = strings.Replace(typeName, "Object", "", -1) + isArray := false + if typeName[0] == '[' && typeName[len(typeName)-1] == ']' { + typeName = typeName[1 : len(typeName)-1] + isArray = true + } + isMap := false + mapPattern 
:= regexp.MustCompile("^Mapstring,\\[(.*)\\]$") + if matches := mapPattern.FindSubmatch([]byte(typeName)); matches != nil { + typeName = string(matches[1]) + isMap = true + } else { + // match map[string,] + mapPattern2 := regexp.MustCompile("^Map\\[string,(.+)\\]$") + if matches := mapPattern2.FindSubmatch([]byte(typeName)); matches != nil { + typeName = string(matches[1]) + isMap = true + } + } + description := strings.Trim(parts[len(parts)-1], " ") + description = removeMarkdownLinks(description) + description = strings.Replace(description, "\n", " ", -1) + + requiredLabel1 := "**Required.** " + requiredLabel2 := "**REQUIRED**." + if strings.Contains(description, requiredLabel1) || + strings.Contains(description, requiredLabel2) { + // only include required values if their "Validity" is "Any" or if no validity is specified + valid := true + if len(parts) == 4 { + validity := parts[2] + if strings.Contains(validity, "Any") { + valid = true + } else { + valid = false + } + } + if valid { + schemaObject.RequiredFields = append(schemaObject.RequiredFields, fieldName) + } + description = strings.Replace(description, requiredLabel1, "", -1) + description = strings.Replace(description, requiredLabel2, "", -1) + } + schemaField := SchemaObjectField{ + Name: fieldName, + Type: typeName, + IsArray: isArray, + IsMap: isMap, + Description: description, + } + schemaObject.FixedFields = append(schemaObject.FixedFields, schemaField) + } + } + } +} + +// extract the patterned fields from a table in a section +func parsePatternedFields(input string, schemaObject *SchemaObject) { + lines := strings.Split(input, "\n") + for _, line := range lines { + + line = strings.Replace(line, " \\| ", " OR ", -1) + + parts := strings.Split(line, "|") + if len(parts) > 1 { + fieldName := strings.Trim(stripLink(parts[0]), " ") + fieldName = removeMarkdownLinks(fieldName) + if fieldName == "HTTP Status Code" { + fieldName = "^([0-9X]{3})$" + } + if fieldName != "Field Pattern" && fieldName != "---" { + typeName := parts[1] + typeName = strings.Trim(typeName, " ") + typeName = strings.Replace(typeName, "`", "", -1) + typeName = removeMarkdownLinks(typeName) + typeName = strings.Replace(typeName, " ", "", -1) + typeName = strings.Replace(typeName, "Object", "", -1) + typeName = strings.Replace(typeName, "{expression}", "Expression", -1) + isArray := false + if typeName[0] == '[' && typeName[len(typeName)-1] == ']' { + typeName = typeName[1 : len(typeName)-1] + isArray = true + } + isMap := false + mapPattern := regexp.MustCompile("^Mapstring,\\[(.*)\\]$") + if matches := mapPattern.FindSubmatch([]byte(typeName)); matches != nil { + typeName = string(matches[1]) + isMap = true + } + description := strings.Trim(parts[len(parts)-1], " ") + description = removeMarkdownLinks(description) + description = strings.Replace(description, "\n", " ", -1) + + schemaField := SchemaObjectField{ + Name: fieldName, + Type: typeName, + IsArray: isArray, + IsMap: isMap, + Description: description, + } + schemaObject.PatternedFields = append(schemaObject.PatternedFields, schemaField) + } + } + } +} + +// SchemaObjectField describes a field of a schema. +type SchemaObjectField struct { + Name string `json:"name"` + Type string `json:"type"` + IsArray bool `json:"is_array"` + IsMap bool `json:"is_map"` + Description string `json:"description"` +} + +// SchemaObject describes a schema. 
+type SchemaObject struct { + Name string `json:"name"` + ID string `json:"id"` + Description string `json:"description"` + Extendable bool `json:"extendable"` + RequiredFields []string `json:"required"` + FixedFields []SchemaObjectField `json:"fixed"` + PatternedFields []SchemaObjectField `json:"patterned"` +} + +// SchemaModel is a collection of schemas. +type SchemaModel struct { + Objects []SchemaObject +} + +func (m *SchemaModel) objectWithID(id string) *SchemaObject { + for _, object := range m.Objects { + if object.ID == id { + return &object + } + } + return nil +} + +// NewSchemaModel returns a new SchemaModel. +func NewSchemaModel(filename string) (schemaModel *SchemaModel, err error) { + + b, err := ioutil.ReadFile("3.0.md") + if err != nil { + return nil, err + } + + // divide the specification into sections + document := ReadSection(string(b), 1) + document.Display("") + + // read object names and their details + specification := document.Children[4] // fragile! the section title is "Specification" + schema := specification.Children[7] // fragile! the section title is "Schema" + anchor := regexp.MustCompile("^#### 0 { + description := section.Children[0].Text + description = removeMarkdownLinks(description) + description = strings.Trim(description, " \t\n") + description = strings.Replace(description, "\n", " ", -1) + schemaObject.Description = description + } + + // is the object extendable? + if strings.Contains(section.Text, "Specification Extensions") { + schemaObject.Extendable = true + } + + // look for fixed fields + for _, child := range section.Children { + if child.NiceTitle() == "Fixed Fields" { + parseFixedFields(child.Text, &schemaObject) + } + } + + // look for patterned fields + for _, child := range section.Children { + if child.NiceTitle() == "Patterned Fields" { + parsePatternedFields(child.Text, &schemaObject) + } + } + + schemaObjects = append(schemaObjects, schemaObject) + } + } + + return &SchemaModel{Objects: schemaObjects}, nil +} + +// UnionType represents a union of two types. +type UnionType struct { + Name string + ObjectType1 string + ObjectType2 string +} + +var unionTypes map[string]*UnionType + +func noteUnionType(typeName, objectType1, objectType2 string) { + if unionTypes == nil { + unionTypes = make(map[string]*UnionType, 0) + } + unionTypes[typeName] = &UnionType{ + Name: typeName, + ObjectType1: objectType1, + ObjectType2: objectType2, + } +} + +// MapType represents a map of a specified type (with string keys). 
+type MapType struct { + Name string + ObjectType string +} + +var mapTypes map[string]*MapType + +func noteMapType(typeName, objectType string) { + if mapTypes == nil { + mapTypes = make(map[string]*MapType, 0) + } + mapTypes[typeName] = &MapType{ + Name: typeName, + ObjectType: objectType, + } +} + +func definitionNameForType(typeName string) string { + name := typeName + switch typeName { + case "OAuthFlows": + name = "oauthFlows" + case "OAuthFlow": + name = "oauthFlow" + case "XML": + name = "xml" + case "ExternalDocumentation": + name = "externalDocs" + default: + // does the name contain an "OR" + if parts := strings.Split(typeName, "OR"); len(parts) > 1 { + name = lowerFirst(parts[0]) + "Or" + parts[1] + noteUnionType(name, parts[0], parts[1]) + } else { + name = lowerFirst(typeName) + } + } + return "#/definitions/" + name +} + +func pluralize(name string) string { + if name == "any" { + return "anys" + } + switch name[len(name)-1] { + case 'y': + name = name[0:len(name)-1] + "ies" + case 's': + name = name + "Map" + default: + name = name + "s" + } + return name +} + +func definitionNameForMapOfType(typeName string) string { + // pluralize the type name to get the name of an object representing a map of them + var elementTypeName string + var mapTypeName string + if parts := strings.Split(typeName, "OR"); len(parts) > 1 { + elementTypeName = lowerFirst(parts[0]) + "Or" + parts[1] + noteUnionType(elementTypeName, parts[0], parts[1]) + mapTypeName = pluralize(lowerFirst(parts[0])) + "Or" + pluralize(parts[1]) + } else { + elementTypeName = lowerFirst(typeName) + mapTypeName = pluralize(elementTypeName) + } + noteMapType(mapTypeName, elementTypeName) + return "#/definitions/" + mapTypeName +} + +func updateSchemaFieldWithModelField(schemaField *jsonschema.Schema, modelField *SchemaObjectField) { + // fmt.Printf("IN %s:%+v\n", name, schemaField) + // update the attributes of the schema field + if modelField.IsArray { + // is array + itemSchema := &jsonschema.Schema{} + switch modelField.Type { + case "string": + itemSchema.Type = jsonschema.NewStringOrStringArrayWithString("string") + case "boolean": + itemSchema.Type = jsonschema.NewStringOrStringArrayWithString("boolean") + case "primitive": + itemSchema.Ref = stringptr(definitionNameForType("Primitive")) + default: + itemSchema.Ref = stringptr(definitionNameForType(modelField.Type)) + } + schemaField.Items = jsonschema.NewSchemaOrSchemaArrayWithSchema(itemSchema) + schemaField.Type = jsonschema.NewStringOrStringArrayWithString("array") + boolValue := true // not sure about this + schemaField.UniqueItems = &boolValue + } else if modelField.IsMap { + schemaField.Ref = stringptr(definitionNameForMapOfType(modelField.Type)) + } else { + // is scalar + switch modelField.Type { + case "string": + schemaField.Type = jsonschema.NewStringOrStringArrayWithString("string") + case "boolean": + schemaField.Type = jsonschema.NewStringOrStringArrayWithString("boolean") + case "primitive": + schemaField.Ref = stringptr(definitionNameForType("Primitive")) + default: + schemaField.Ref = stringptr(definitionNameForType(modelField.Type)) + } + } +} + +func buildSchemaWithModel(modelObject *SchemaObject) (schema *jsonschema.Schema) { + + schema = &jsonschema.Schema{} + schema.Type = jsonschema.NewStringOrStringArrayWithString("object") + + if modelObject.RequiredFields != nil && len(modelObject.RequiredFields) > 0 { + // copy array + arrayCopy := modelObject.RequiredFields + schema.Required = &arrayCopy + } + + schema.AdditionalProperties = 
jsonschema.NewSchemaOrBooleanWithBoolean(false) + + schema.Description = stringptr(modelObject.Description) + + // handle fixed fields + if modelObject.FixedFields != nil { + newNamedSchemas := make([]*jsonschema.NamedSchema, 0) + for _, modelField := range modelObject.FixedFields { + schemaField := schema.PropertyWithName(modelField.Name) + if schemaField == nil { + // create and add the schema field + schemaField = &jsonschema.Schema{} + namedSchema := &jsonschema.NamedSchema{Name: modelField.Name, Value: schemaField} + newNamedSchemas = append(newNamedSchemas, namedSchema) + } + updateSchemaFieldWithModelField(schemaField, &modelField) + } + for _, pair := range newNamedSchemas { + if schema.Properties == nil { + properties := make([]*jsonschema.NamedSchema, 0) + schema.Properties = &properties + } + *(schema.Properties) = append(*(schema.Properties), pair) + } + + } else { + if schema.Properties != nil { + fmt.Printf("SCHEMA SHOULD NOT HAVE PROPERTIES %s\n", modelObject.ID) + } + } + + // handle patterned fields + if modelObject.PatternedFields != nil { + newNamedSchemas := make([]*jsonschema.NamedSchema, 0) + + for _, modelField := range modelObject.PatternedFields { + schemaField := schema.PatternPropertyWithName(modelField.Name) + if schemaField == nil { + // create and add the schema field + schemaField = &jsonschema.Schema{} + // Component names should match "^[a-zA-Z0-9\.\-_]+$" + // See https://github.com/OAI/OpenAPI-Specification/blob/OpenAPI.next/versions/3.0.md#componentsObject + nameRegex := "^[a-zA-Z0-9\\\\.\\\\-_]+$" + if modelObject.Name == "Scopes Object" { + nameRegex = "^" + } else if modelObject.Name == "Headers Object" { + nameRegex = "^[a-zA-Z0-9!#\\-\\$%&'\\*\\+\\\\\\.\\^_`\\|~]+" + } + propertyName := strings.Replace(modelField.Name, "{name}", nameRegex, -1) + // The field name MUST begin with a slash, see https://github.com/OAI/OpenAPI-Specification/blob/OpenAPI.next/versions/3.0.md#paths-object + // JSON Schema for OpenAPI v2 uses "^/" as regex for paths, see https://github.com/OAI/OpenAPI-Specification/blob/OpenAPI.next/schemas/v2.0/schema.json#L173 + propertyName = strings.Replace(propertyName, "/{path}", "^/", -1) + // Replace human-friendly (and regex-confusing) description with a blank pattern + propertyName = strings.Replace(propertyName, "{expression}", "^", -1) + propertyName = strings.Replace(propertyName, "{property}", "^", -1) + namedSchema := &jsonschema.NamedSchema{Name: propertyName, Value: schemaField} + newNamedSchemas = append(newNamedSchemas, namedSchema) + } + updateSchemaFieldWithModelField(schemaField, &modelField) + } + + for _, pair := range newNamedSchemas { + if schema.PatternProperties == nil { + properties := make([]*jsonschema.NamedSchema, 0) + schema.PatternProperties = &properties + } + *(schema.PatternProperties) = append(*(schema.PatternProperties), pair) + } + + } else { + if schema.PatternProperties != nil && !modelObject.Extendable { + fmt.Printf("SCHEMA SHOULD NOT HAVE PATTERN PROPERTIES %s\n", modelObject.ID) + } + } + + if modelObject.Extendable { + schemaField := schema.PatternPropertyWithName("^x-") + if schemaField != nil { + schemaField.Ref = stringptr("#/definitions/specificationExtension") + } else { + schemaField = &jsonschema.Schema{} + schemaField.Ref = stringptr("#/definitions/specificationExtension") + namedSchema := &jsonschema.NamedSchema{Name: "^x-", Value: schemaField} + if schema.PatternProperties == nil { + properties := make([]*jsonschema.NamedSchema, 0) + schema.PatternProperties = &properties + } + 
*(schema.PatternProperties) = append(*(schema.PatternProperties), namedSchema) + } + } else { + schemaField := schema.PatternPropertyWithName("^x-") + if schemaField != nil { + fmt.Printf("INVALID EXTENSION SUPPORT %s:%s\n", modelObject.ID, "^x-") + } + } + + return schema +} + +// return a pointer to a copy of a passed-in string +func stringptr(input string) (output *string) { + return &input +} + +func int64ptr(input int64) (output *int64) { + return &input +} + +func arrayOfSchema() *jsonschema.Schema { + return &jsonschema.Schema{ + Type: jsonschema.NewStringOrStringArrayWithString("array"), + MinItems: int64ptr(1), + Items: jsonschema.NewSchemaOrSchemaArrayWithSchema(&jsonschema.Schema{Ref: stringptr("#/definitions/schemaOrReference")}), + } +} + +func main() { + // read and parse the text specification into a model structure + model, err := NewSchemaModel("3.0.md") + if err != nil { + panic(err) + } + + // write the model as JSON (for debugging) + modelJSON, _ := json.MarshalIndent(model, "", " ") + err = ioutil.WriteFile("model.json", modelJSON, 0644) + if err != nil { + panic(err) + } + + // build the top-level schema using the "OAS" model + oasModel := model.objectWithID("oas") + if oasModel == nil { + log.Printf("Unable to find OAS model. Has the source document structure changed?") + os.Exit(-1) + } + schema := buildSchemaWithModel(oasModel) + + // manually set a few fields + schema.Title = stringptr("A JSON Schema for OpenAPI 3.0.") + schema.ID = stringptr("http://openapis.org/v3/schema.json#") + schema.Schema = stringptr("http://json-schema.org/draft-04/schema#") + + // loop over all models and create the corresponding schema objects + definitions := make([]*jsonschema.NamedSchema, 0) + schema.Definitions = &definitions + + for _, modelObject := range model.Objects { + if modelObject.ID == "oas" { + continue + } + definitionSchema := buildSchemaWithModel(&modelObject) + name := modelObject.ID + if name == "externalDocumentation" { + name = "externalDocs" + } + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema(name, definitionSchema)) + } + + // copy the properties of headerObject from parameterObject + headerObject := schema.DefinitionWithName("header") + parameterObject := schema.DefinitionWithName("parameter") + if parameterObject != nil { + newArray := make([]*jsonschema.NamedSchema, 0) + for _, property := range *(parameterObject.Properties) { + // we need to remove a few properties... + if property.Name != "name" && property.Name != "in" { + newArray = append(newArray, property) + } + } + headerObject.Properties = &newArray + // "So a shorthand for copying array arr would be tmp := append([]int{}, arr...)" + ppArray := make([]*jsonschema.NamedSchema, 0) + ppArray = append(ppArray, *(parameterObject.PatternProperties)...) 
+ headerObject.PatternProperties = &ppArray + } + + // generate implied union types + unionTypeKeys := make([]string, 0, len(unionTypes)) + for key := range unionTypes { + unionTypeKeys = append(unionTypeKeys, key) + } + sort.Strings(unionTypeKeys) + for _, unionTypeKey := range unionTypeKeys { + unionType := unionTypes[unionTypeKey] + objectSchema := schema.DefinitionWithName(unionType.Name) + if objectSchema == nil { + objectSchema = &jsonschema.Schema{} + oneOf := make([]*jsonschema.Schema, 0) + oneOf = append(oneOf, &jsonschema.Schema{Ref: stringptr("#/definitions/" + lowerFirst(unionType.ObjectType1))}) + oneOf = append(oneOf, &jsonschema.Schema{Ref: stringptr("#/definitions/" + lowerFirst(unionType.ObjectType2))}) + objectSchema.OneOf = &oneOf + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema(unionType.Name, objectSchema)) + } + } + + // generate implied map types + mapTypeKeys := make([]string, 0, len(mapTypes)) + for key := range mapTypes { + mapTypeKeys = append(mapTypeKeys, key) + } + sort.Strings(mapTypeKeys) + for _, mapTypeKey := range mapTypeKeys { + mapType := mapTypes[mapTypeKey] + objectSchema := schema.DefinitionWithName(mapType.Name) + if objectSchema == nil { + objectSchema = &jsonschema.Schema{} + objectSchema.Type = jsonschema.NewStringOrStringArrayWithString("object") + additionalPropertiesSchema := &jsonschema.Schema{} + if mapType.ObjectType == "string" { + additionalPropertiesSchema.Type = jsonschema.NewStringOrStringArrayWithString("string") + } else { + additionalPropertiesSchema.Ref = stringptr("#/definitions/" + lowerFirst(mapType.ObjectType)) + } + objectSchema.AdditionalProperties = jsonschema.NewSchemaOrBooleanWithSchema(additionalPropertiesSchema) + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema(mapType.Name, objectSchema)) + } + } + + // add schema objects for "object", "any", and "expression" + if true { + objectSchema := &jsonschema.Schema{} + objectSchema.Type = jsonschema.NewStringOrStringArrayWithString("object") + objectSchema.AdditionalProperties = jsonschema.NewSchemaOrBooleanWithBoolean(true) + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema("object", objectSchema)) + } + if true { + objectSchema := &jsonschema.Schema{} + objectSchema.AdditionalProperties = jsonschema.NewSchemaOrBooleanWithBoolean(true) + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema("any", objectSchema)) + } + if true { + objectSchema := &jsonschema.Schema{} + objectSchema.Type = jsonschema.NewStringOrStringArrayWithString("object") + objectSchema.AdditionalProperties = jsonschema.NewSchemaOrBooleanWithBoolean(true) + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema("expression", objectSchema)) + } + + // add schema objects for "specificationExtension" + if true { + objectSchema := &jsonschema.Schema{} + objectSchema.Description = stringptr("Any property starting with x- is valid.") + oneOf := make([]*jsonschema.Schema, 0) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("null")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("number")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("boolean")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("string")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("object")}) + 
oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("array")}) + objectSchema.OneOf = &oneOf + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema("specificationExtension", objectSchema)) + } + + // add schema objects for "defaultType" + if true { + objectSchema := &jsonschema.Schema{} + oneOf := make([]*jsonschema.Schema, 0) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("null")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("array")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("object")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("number")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("boolean")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("string")}) + objectSchema.OneOf = &oneOf + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema("defaultType", objectSchema)) + } + + // add schema objects for "primitive" + if false { // we don't seem to need these for 3.0 RC2 + objectSchema := &jsonschema.Schema{} + oneOf := make([]*jsonschema.Schema, 0) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("number")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("boolean")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("string")}) + objectSchema.OneOf = &oneOf + *schema.Definitions = append(*schema.Definitions, jsonschema.NewNamedSchema("primitive", objectSchema)) + } + + // force a few more things into the "schema" schema + schemaObject := schema.DefinitionWithName("schema") + schemaObject.CopyOfficialSchemaProperties( + []string{ + "title", + "multipleOf", + "maximum", + "exclusiveMaximum", + "minimum", + "exclusiveMinimum", + "maxLength", + "minLength", + "pattern", + "maxItems", + "minItems", + "uniqueItems", + "maxProperties", + "minProperties", + "required", + "enum", + }) + schemaObject.AdditionalProperties = jsonschema.NewSchemaOrBooleanWithBoolean(false) + schemaObject.AddProperty("type", &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("string")}) + schemaObject.AddProperty("allOf", arrayOfSchema()) + schemaObject.AddProperty("oneOf", arrayOfSchema()) + schemaObject.AddProperty("anyOf", arrayOfSchema()) + schemaObject.AddProperty("not", &jsonschema.Schema{Ref: stringptr("#/definitions/schema")}) + anyOf := make([]*jsonschema.Schema, 0) + anyOf = append(anyOf, &jsonschema.Schema{Ref: stringptr("#/definitions/schemaOrReference")}) + anyOf = append(anyOf, arrayOfSchema()) + schemaObject.AddProperty("items", + &jsonschema.Schema{AnyOf: &anyOf}) + schemaObject.AddProperty("properties", &jsonschema.Schema{ + Type: jsonschema.NewStringOrStringArrayWithString("object"), + AdditionalProperties: jsonschema.NewSchemaOrBooleanWithSchema( + &jsonschema.Schema{Ref: stringptr("#/definitions/schemaOrReference")})}) + + if true { // add additionalProperties schema object + oneOf := make([]*jsonschema.Schema, 0) + oneOf = append(oneOf, &jsonschema.Schema{Ref: stringptr("#/definitions/schemaOrReference")}) + oneOf = append(oneOf, &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("boolean")}) + schemaObject.AddProperty("additionalProperties", 
&jsonschema.Schema{OneOf: &oneOf}) + } + + schemaObject.AddProperty("default", &jsonschema.Schema{Ref: stringptr("#/definitions/defaultType")}) + schemaObject.AddProperty("description", &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("string")}) + schemaObject.AddProperty("format", &jsonschema.Schema{Type: jsonschema.NewStringOrStringArrayWithString("string")}) + + // fix the content object + contentObject := schema.DefinitionWithName("content") + if contentObject != nil { + pairs := make([]*jsonschema.NamedSchema, 0) + contentObject.PatternProperties = &pairs + namedSchema := &jsonschema.NamedSchema{Name: "^", Value: &jsonschema.Schema{Ref: stringptr("#/definitions/mediaType")}} + *(contentObject.PatternProperties) = append(*(contentObject.PatternProperties), namedSchema) + } + + // write the updated schema + output := schema.JSONString() + err = ioutil.WriteFile("schema.json", []byte(output), 0644) + if err != nil { + panic(err) + } +} diff --git a/vendor/github.com/googleapis/gnostic/README.md b/vendor/github.com/googleapis/gnostic/README.md new file mode 100644 index 000000000..d350f3f01 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/README.md @@ -0,0 +1,103 @@ +[![Build Status](https://travis-ci.org/googleapis/gnostic.svg?branch=master)](https://travis-ci.org/googleapis/gnostic) + +# ⨁ gnostic + +This repository contains a Go command line tool which converts +JSON and YAML [OpenAPI](https://github.com/OAI/OpenAPI-Specification) +descriptions to and from equivalent Protocol Buffer representations. + +[Protocol Buffers](https://developers.google.com/protocol-buffers/) +provide a language-neutral, platform-neutral, extensible mechanism +for serializing structured data. +**gnostic**'s Protocol Buffer models for the OpenAPI Specification +can be used to generate code that includes data structures with +explicit fields for the elements of an OpenAPI description. +This makes it possible for developers to work with OpenAPI +descriptions in type-safe ways, which is particularly useful +in strongly-typed languages like Go and Swift. + +**gnostic** reads OpenAPI descriptions into +these generated data structures, reports errors, +resolves internal dependencies, and writes the results +in a binary form that can be used in any language that is +supported by the Protocol Buffer tools. +A plugin interface simplifies integration with API +tools written in a variety of different languages, +and when necessary, Protocol Buffer OpenAPI descriptions +can be reexported as JSON or YAML. + +**gnostic** compilation code and OpenAPI Protocol Buffer +models are automatically generated from an +[OpenAPI JSON Schema](https://github.com/OAI/OpenAPI-Specification/blob/master/schemas/v2.0/schema.json). +Source code for the generator is in the [generate-gnostic](generate-gnostic) directory. + +## Disclaimer + +This is prerelease software and work in progress. Feedback and +contributions are welcome, but we currently make no guarantees of +function or stability. + +## Requirements + +**gnostic** can be run in any environment that supports [Go](http://golang.org) +and the [Google Protocol Buffer Compiler](https://github.com/google/protobuf). + +## Installation + +1. Get this package by downloading it with `go get`. + + go get github.com/googleapis/gnostic + +2. [Optional] Build and run the compiler generator. 
+This uses the OpenAPI JSON schema to generate a Protocol Buffer language file +that describes the OpenAPI specification and a Go-language file of code that +will read a JSON or YAML OpenAPI representation into the generated protocol +buffers. Pre-generated versions of these files are in the OpenAPIv2 directory. + + cd $GOPATH/src/github.com/googleapis/gnostic/generate-gnostic + go install + cd .. + generate-gnostic --v2 + +3. [Optional] Generate Protocol Buffer support code. +A pre-generated version of this file is checked into the OpenAPIv2 directory. +This step requires a local installation of protoc, the Protocol Buffer Compiler. +You can get protoc [here](https://github.com/google/protobuf). + + ./COMPILE-PROTOS.sh + +4. [Optional] Rebuild **gnostic**. This is only necessary if you've performed steps +2 or 3 above. + + go install github.com/googleapis/gnostic + +5. Run **gnostic**. This will create a file in the current directory named "petstore.pb" that contains a binary +Protocol Buffer description of a sample API. + + gnostic --pb-out=. examples/petstore.json + +6. You can also compile files that you specify with a URL. Here's another way to compile the previous +example. This time we're creating "petstore.text", which contains a textual representation of the +Protocol Buffer description. This is mainly for use in testing and debugging. + + gnostic --text-out=petstore.text https://raw.githubusercontent.com/googleapis/gnostic/master/examples/petstore.json + +7. For a sample application, see apps/report. + + go install github.com/googleapis/gnostic/apps/report + report petstore.pb + +8. **gnostic** supports plugins. This builds and runs a sample plugin +that reports some basic information about an API. The "-" causes the plugin to +write its output to stdout. + + go install github.com/googleapis/gnostic/plugins/gnostic-go-sample + gnostic examples/petstore.json --go-sample-out=- + +## Copyright + +Copyright 2017, Google Inc. + +## License + +Released under the Apache 2.0 license. diff --git a/vendor/github.com/googleapis/gnostic/apps/disco/README.md b/vendor/github.com/googleapis/gnostic/apps/disco/README.md new file mode 100644 index 000000000..82cd51b81 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/disco/README.md @@ -0,0 +1,38 @@ +# disco + +This directory contains a tool for working with Google's Discovery API and Discovery Format, +including the ability to convert Discovery Format descriptions to OpenAPI. + +Installation: + + go get github.com/googleapis/gnostic + go install github.com/googleapis/gnostic/apps/disco + + +Usage: + + disco help + +Prints a list of commands and options. + + disco list [--raw] + +Calls the Google Discovery API and lists available APIs. +The `--raw` option prints the raw results of the Discovery List APIs call. + + disco get [] [] [--raw] [--openapi2] [--openapi3] [--features] [--schemas] [--all] + +Gets the specified API and version from the Google Discovery API. +`` can be omitted if it is unique. +The `--raw` option saves the raw Discovery Format description. +The `--openapi2` option rewrites the API description in OpenAPI v2. +The `--openapi3` option rewrites the API description in OpenAPI v3. +The `--features` option displays the contents of the `features` sections of discovery documents. +The `--schemas` option displays information about the schemas defined for the API. +The `--all` option runs the other associated operations for all of the APIs available from the Discovery Service. 
+When `--all` is specified, `` and `` should be omitted. + + disco [--openapi2] [--openapi3] [--features] [--schemas] + +Applies the specified operations to a local file. See the `get` command for details. + diff --git a/vendor/github.com/googleapis/gnostic/apps/disco/list.go b/vendor/github.com/googleapis/gnostic/apps/disco/list.go new file mode 100644 index 000000000..8f9dc9676 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/disco/list.go @@ -0,0 +1,79 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "encoding/json" + "errors" + "strings" +) + +// APIsListServiceURL is the URL for the Google APIs Discovery Service +const APIsListServiceURL = "https://www.googleapis.com/discovery/v1/apis" + +// A List represents the results of a call to the apis/list API. +// https://developers.google.com/discovery/v1/reference/apis/list +type List struct { + Kind string `json:"kind"` + DiscoveryVersion string `json:"discoveryVersion"` + APIs []*API `json:"items"` +} + +// NewList unmarshals the bytes into a Document. +func NewList(bytes []byte) (*List, error) { + var listResponse List + err := json.Unmarshal(bytes, &listResponse) + return &listResponse, err +} + +// An API represents the an API description returned by the apis/list API. +type API struct { + Kind string `json:"kind"` + ID string `json:"id"` + Name string `json:"name"` + Version string `json:"version"` + Title string `json:"title"` + Description string `json:"description"` + DiscoveryRestURL string `json:"discoveryRestUrl"` + DiscoveryLink string `json:"discoveryLink"` + Icons map[string]string `json:"icons"` + DocumentationLink string `json:"documentationLink"` + Labels []string `json:"labels"` + Preferred bool `json:"preferred"` +} + +// APIWithNameAndVersion returns the API with a specified name and version. +// If version is the empty string, the API name must be unique. +func (a *List) APIWithNameAndVersion(name string, version string) (*API, error) { + var api *API // the API to return + versions := make([]string, 0) // the matching version names + // Scan the list for matching APIs and versions. + for _, item := range a.APIs { + if item.Name == name { + if version == "" || version == item.Version { + api = item + versions = append(versions, item.Version) + } + } + } + switch { + case len(versions) == 0: + return nil, errors.New(name + " was not found.") + case len(versions) > 1: + return nil, errors.New(name + " has multiple versions: " + strings.Join(versions, ", ")) + default: + return api, nil + } +} diff --git a/vendor/github.com/googleapis/gnostic/apps/disco/main.go b/vendor/github.com/googleapis/gnostic/apps/disco/main.go new file mode 100644 index 000000000..4f6135c2f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/disco/main.go @@ -0,0 +1,255 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "errors" + "fmt" + "io/ioutil" + "log" + "os" + "strings" + + "github.com/docopt/docopt-go" + "github.com/golang/protobuf/proto" + "github.com/googleapis/gnostic/compiler" + discovery "github.com/googleapis/gnostic/discovery" +) + +func main() { + usage := ` +Usage: + disco help + disco list [--raw] + disco get [] [] [--raw] [--openapi2] [--openapi3] [--features] [--schemas] [--all] + disco [--openapi2] [--openapi3] [--features] [--schemas] + ` + arguments, err := docopt.Parse(usage, nil, false, "Disco 1.0", false) + if err != nil { + log.Fatalf("%+v", err) + } + + // Help. + if arguments["help"].(bool) { + fmt.Println("\nRead and process Google's Discovery Format for APIs.") + fmt.Println(usage) + fmt.Println("To learn more about Discovery Format, visit https://developers.google.com/discovery/\n") + } + + // List APIs. + if arguments["list"].(bool) { + // Read the list of APIs from the apis/list service. + bytes, err := compiler.FetchFile(APIsListServiceURL) + if err != nil { + log.Fatalf("%+v", err) + } + if arguments["--raw"].(bool) { + ioutil.WriteFile("disco-list.json", bytes, 0644) + } else { + // Unpack the apis/list response. + listResponse, err := NewList(bytes) + if err != nil { + log.Fatalf("%+v", err) + } + // List the APIs. + for _, api := range listResponse.APIs { + fmt.Printf("%s %s\n", api.Name, api.Version) + } + } + } + + // Get an API description. + if arguments["get"].(bool) { + // Read the list of APIs from the apis/list service. + bytes, err := compiler.FetchFile(APIsListServiceURL) + if err != nil { + log.Fatalf("%+v", err) + } + // Unpack the apis/list response + listResponse, err := NewList(bytes) + if arguments["--all"].(bool) { + if !arguments["--raw"].(bool) && + !arguments["--openapi2"].(bool) && + !arguments["--openapi3"].(bool) && + !arguments["--features"].(bool) && + !arguments["--schemas"].(bool) { + log.Fatalf("Please specify an output option.") + } + for _, api := range listResponse.APIs { + log.Printf("%s/%s", api.Name, api.Version) + // Fetch the discovery description of the API. + bytes, err = compiler.FetchFile(api.DiscoveryRestURL) + if err != nil { + log.Printf("%+v", err) + continue + } + // Export any requested formats. + _, err := handleExportArgumentsForBytes(arguments, bytes) + if err != nil { + log.Printf("%+v", err) + continue + } + } + } else { + // Find the matching API + var apiName string + if arguments[""] != nil { + apiName = arguments[""].(string) + } + var apiVersion string + if arguments[""] != nil { + apiVersion = arguments[""].(string) + } + // Get the description of an API. + api, err := listResponse.APIWithNameAndVersion(apiName, apiVersion) + if err != nil { + log.Fatalf("%+v", err) + } + // Fetch the discovery description of the API. + bytes, err = compiler.FetchFile(api.DiscoveryRestURL) + if err != nil { + log.Fatalf("%+v", err) + } + // Export any requested formats. 
+ handled, err := handleExportArgumentsForBytes(arguments, bytes) + if err != nil { + log.Fatalf("%+v", err) + } else if !handled { + // If no action was requested, write the document to stdout. + os.Stdout.Write(bytes) + } + } + } + + // Do something with a local API description. + if arguments[""] != nil { + // Read the local file. + filename := arguments[""].(string) + bytes, err := ioutil.ReadFile(filename) + if err != nil { + log.Fatalf("%+v", err) + } + // Export any requested formats. + _, err = handleExportArgumentsForBytes(arguments, bytes) + if err != nil { + log.Fatalf("%+v", err) + } + } +} + +func handleExportArgumentsForBytes(arguments map[string]interface{}, bytes []byte) (handled bool, err error) { + // Unpack the discovery document. + info, err := compiler.ReadInfoFromBytes("source", bytes) + if err != nil { + return true, err + } + m, ok := compiler.UnpackMap(info) + if !ok { + log.Printf("%s", string(bytes)) + return true, errors.New("Invalid input") + } + document, err := discovery.NewDocument(m, compiler.NewContext("$root", nil)) + if arguments["--raw"].(bool) { + // Write the Discovery document as a JSON file. + filename := "disco-" + document.Name + "-" + document.Version + ".json" + ioutil.WriteFile(filename, bytes, 0644) + handled = true + } + if arguments["--features"].(bool) { + if len(document.Features) > 0 { + log.Printf("%s/%s features: %s\n", + document.Name, + document.Version, + strings.Join(document.Features, ",")) + } + } + if arguments["--schemas"].(bool) { + for _, schema := range document.Schemas.AdditionalProperties { + checkSchema(schema.Name, schema.Value, 0) + } + } + if arguments["--openapi3"].(bool) { + // Generate the OpenAPI 3 equivalent. + openAPIDocument, err := OpenAPIv3(document) + if err != nil { + return handled, err + } + bytes, err = proto.Marshal(openAPIDocument) + if err != nil { + return handled, err + } + filename := "openapi3-" + document.Name + "-" + document.Version + ".pb" + err = ioutil.WriteFile(filename, bytes, 0644) + if err != nil { + return handled, err + } + handled = true + } + if arguments["--openapi2"].(bool) { + // Generate the OpenAPI 2 equivalent. + openAPIDocument, err := OpenAPIv2(document) + if err != nil { + return handled, err + } + bytes, err = proto.Marshal(openAPIDocument) + if err != nil { + return handled, err + } + filename := "openapi2-" + document.Name + "-" + document.Version + ".pb" + err = ioutil.WriteFile(filename, bytes, 0644) + if err != nil { + return handled, err + } + handled = true + } + + return handled, err +} + +func checkSchema(schemaName string, schema *discovery.Schema, depth int) { + switch schema.Type { + case "string": + case "number": + case "integer": + case "boolean": + case "object": // only objects should have properties... 
+ case "array": + case "null": + log.Printf("NULL TYPE %s %s", schemaName, schema.Type) + case "any": + //log.Printf("ANY TYPE %s/%s %s", schemaName, property.Name, propertySchema.Type) + default: + //log.Printf("UNKNOWN TYPE %s/%s %s", schemaName, property.Name, propertySchema.Type) + } + if len(schema.Properties.AdditionalProperties) > 0 { + if depth > 0 { + log.Printf("ANONYMOUS SCHEMA %s", schemaName) + } + for _, property := range schema.Properties.AdditionalProperties { + propertySchema := property.Value + ref := propertySchema.XRef + if ref != "" { + //log.Printf("REF: %s", ref) + // assert (propertySchema.Type == "") + } else { + checkSchema(schemaName+"/"+property.Name, propertySchema, depth+1) + } + } + } + if schema.AdditionalProperties != nil { + log.Printf("ADDITIONAL PROPERTIES %s", schemaName) + checkSchema(schemaName+"/*", schema.AdditionalProperties, depth+1) + } +} diff --git a/vendor/github.com/googleapis/gnostic/apps/disco/openapiv2.go b/vendor/github.com/googleapis/gnostic/apps/disco/openapiv2.go new file mode 100644 index 000000000..042bde54d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/disco/openapiv2.go @@ -0,0 +1,287 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import ( + "log" + "net/url" + + discovery "github.com/googleapis/gnostic/discovery" + openapi2 "github.com/googleapis/gnostic/OpenAPIv2" +) + +func addOpenAPI2SchemaForSchema(d *openapi2.Document, name string, schema *discovery.Schema) { + //log.Printf("SCHEMA %s\n", name) + d.Definitions.AdditionalProperties = append(d.Definitions.AdditionalProperties, + &openapi2.NamedSchema{ + Name: name, + Value: buildOpenAPI2SchemaForSchema(schema), + }) +} + +func buildOpenAPI2SchemaForSchema(schema *discovery.Schema) *openapi2.Schema { + s := &openapi2.Schema{} + + if description := schema.Description; description != "" { + s.Description = description + } + if typeName := schema.Type; typeName != "" { + s.Type = &openapi2.TypeItem{[]string{typeName}} + } + if ref := schema.XRef; ref != "" { + s.XRef = "#/definitions/" + ref + } + if len(schema.Enum) > 0 { + for _, e := range schema.Enum { + s.Enum = append(s.Enum, &openapi2.Any{Yaml: e}) + } + } + if schema.Items != nil { + s2 := buildOpenAPI2SchemaForSchema(schema.Items) + s.Items = &openapi2.ItemsItem{} + s.Items.Schema = append(s.Items.Schema, s2) + } + if schema.Properties != nil { + if len(schema.Properties.AdditionalProperties) > 0 { + s.Properties = &openapi2.Properties{} + for _, pair := range schema.Properties.AdditionalProperties { + s.Properties.AdditionalProperties = append(s.Properties.AdditionalProperties, + &openapi2.NamedSchema{ + Name: pair.Name, + Value: buildOpenAPI2SchemaForSchema(pair.Value), + }, + ) + } + } + } + // assume that all schemas are closed + s.AdditionalProperties = &openapi2.AdditionalPropertiesItem{Oneof: &openapi2.AdditionalPropertiesItem_Boolean{Boolean: false}} + return s +} + +func buildOpenAPI2ParameterForParameter(name string, p *discovery.Parameter) *openapi2.Parameter { + //log.Printf("- PARAMETER %+v\n", p.Name) + typeName := p.Type + format := p.Format + location := p.Location + switch location { + case "query": + return &openapi2.Parameter{ + Oneof: &openapi2.Parameter_NonBodyParameter{ + NonBodyParameter: &openapi2.NonBodyParameter{ + Oneof: &openapi2.NonBodyParameter_QueryParameterSubSchema{ + QueryParameterSubSchema: &openapi2.QueryParameterSubSchema{ + Name: name, + In: "query", + Description: p.Description, + Required: p.Required, + Type: typeName, + Format: format, + }, + }, + }, + }, + } + case "path": + return &openapi2.Parameter{ + Oneof: &openapi2.Parameter_NonBodyParameter{ + NonBodyParameter: &openapi2.NonBodyParameter{ + Oneof: &openapi2.NonBodyParameter_PathParameterSubSchema{ + PathParameterSubSchema: &openapi2.PathParameterSubSchema{ + Name: name, + In: "path", + Description: p.Description, + Required: p.Required, + Type: typeName, + Format: format, + }, + }, + }, + }, + } + default: + return nil + } +} + +func buildOpenAPI2ParameterForRequest(p *discovery.Request) *openapi2.Parameter { + return &openapi2.Parameter{ + Oneof: &openapi2.Parameter_BodyParameter{ + BodyParameter: &openapi2.BodyParameter{ + Name: "resource", + In: "body", + Description: "", + Schema: &openapi2.Schema{XRef: "#/definitions/" + p.XRef}, + }, + }, + } +} + +func buildOpenAPI2ResponseForResponse(response *discovery.Response) *openapi2.Response { + //log.Printf("- RESPONSE %+v\n", schema) + if response == nil { + return &openapi2.Response{ + Description: "Successful operation", + } + } + ref := response.XRef + if ref == "" { + log.Printf("WARNING: Unhandled response %+v", response) + } + return &openapi2.Response{ + Description: "Successful operation", + Schema: &openapi2.SchemaItem{ + Oneof: 
&openapi2.SchemaItem_Schema{ + Schema: &openapi2.Schema{ + XRef: "#/definitions/" + ref, + }, + }, + }, + } +} + +func buildOpenAPI2OperationForMethod(method *discovery.Method) *openapi2.Operation { + //log.Printf("METHOD %s %s %s %s\n", method.Name, method.path(), method.HTTPMethod, method.ID) + //log.Printf("MAP %+v\n", method.JSONMap) + parameters := make([]*openapi2.ParametersItem, 0) + if method.Parameters != nil { + for _, pair := range method.Parameters.AdditionalProperties { + parameters = append(parameters, &openapi2.ParametersItem{ + Oneof: &openapi2.ParametersItem_Parameter{ + Parameter: buildOpenAPI2ParameterForParameter(pair.Name, pair.Value), + }, + }) + } + } + responses := &openapi2.Responses{ + ResponseCode: []*openapi2.NamedResponseValue{ + &openapi2.NamedResponseValue{ + Name: "default", + Value: &openapi2.ResponseValue{ + Oneof: &openapi2.ResponseValue_Response{ + Response: buildOpenAPI2ResponseForResponse(method.Response), + }, + }, + }, + }, + } + if method.Request != nil { + parameter := buildOpenAPI2ParameterForRequest(method.Request) + parameters = append(parameters, &openapi2.ParametersItem{ + Oneof: &openapi2.ParametersItem_Parameter{ + Parameter: parameter, + }, + }) + } + return &openapi2.Operation{ + Description: method.Description, + OperationId: method.Id, + Parameters: parameters, + Responses: responses, + } +} + +func getOpenAPI2PathItemForPath(d *openapi2.Document, path string) *openapi2.PathItem { + // First, try to find a path item with the specified path. If it exists, return it. + for _, item := range d.Paths.Path { + if item.Name == path { + return item.Value + } + } + // Otherwise, create and return a new path item. + pathItem := &openapi2.PathItem{} + d.Paths.Path = append(d.Paths.Path, + &openapi2.NamedPathItem{ + Name: path, + Value: pathItem, + }, + ) + return pathItem +} + +func addOpenAPI2PathsForMethod(d *openapi2.Document, name string, method *discovery.Method) { + operation := buildOpenAPI2OperationForMethod(method) + pathItem := getOpenAPI2PathItemForPath(d, pathForMethod(method.Path)) + switch method.HttpMethod { + case "GET": + pathItem.Get = operation + case "POST": + pathItem.Post = operation + case "PUT": + pathItem.Put = operation + case "DELETE": + pathItem.Delete = operation + case "PATCH": + pathItem.Patch = operation + default: + log.Printf("WARNING: Unknown HTTP method %s", method.HttpMethod) + } +} + +func addOpenAPI2PathsForResource(d *openapi2.Document, name string, resource *discovery.Resource) { + //log.Printf("RESOURCE %s (%s)\n", resource.Name, resource.FullName) + if resource.Methods != nil { + for _, pair := range resource.Methods.AdditionalProperties { + addOpenAPI2PathsForMethod(d, pair.Name, pair.Value) + } + } + if resource.Resources != nil { + for _, pair := range resource.Resources.AdditionalProperties { + addOpenAPI2PathsForResource(d, pair.Name, pair.Value) + } + } +} + +func removeTrailingSlash(path string) string { + if len(path) > 1 && path[len(path)-1] == '/' { + return path[0: len(path)-1] + } + return path +} + +// OpenAPIv2 returns an OpenAPI v2 representation of this Discovery document +func OpenAPIv2(api *discovery.Document) (*openapi2.Document, error) { + d := &openapi2.Document{} + d.Swagger = "2.0" + d.Info = &openapi2.Info{ + Title: api.Title, + Version: api.Version, + Description: api.Description, + } + url, _ := url.Parse(api.RootUrl) + d.Host = url.Host + d.BasePath = removeTrailingSlash(api.BasePath) + d.Schemes = []string{url.Scheme} + d.Consumes = []string{"application/json"} + d.Produces = 
[]string{"application/json"} + d.Paths = &openapi2.Paths{} + d.Definitions = &openapi2.Definitions{} + if api.Schemas != nil { + for _, pair := range api.Schemas.AdditionalProperties { + addOpenAPI2SchemaForSchema(d, pair.Name, pair.Value) + } + } + if api.Methods != nil { + for _, pair := range api.Methods.AdditionalProperties { + addOpenAPI2PathsForMethod(d, pair.Name, pair.Value) + } + } + if api.Resources != nil { + for _, pair := range api.Resources.AdditionalProperties { + addOpenAPI2PathsForResource(d, pair.Name, pair.Value) + } + } + return d, nil +} diff --git a/vendor/github.com/googleapis/gnostic/apps/disco/openapiv3.go b/vendor/github.com/googleapis/gnostic/apps/disco/openapiv3.go new file mode 100644 index 000000000..763014aef --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/disco/openapiv3.go @@ -0,0 +1,303 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package main + +import ( + "log" + "net/url" + "strings" + + discovery "github.com/googleapis/gnostic/discovery" + openapi3 "github.com/googleapis/gnostic/OpenAPIv3" +) + +func pathForMethod(path string) string { + return "/" + strings.Replace(path, "{+", "{", -1) +} + +func addOpenAPI3SchemaForSchema(d *openapi3.Document, name string, schema *discovery.Schema) { + d.Components.Schemas.AdditionalProperties = append(d.Components.Schemas.AdditionalProperties, + &openapi3.NamedSchemaOrReference{ + Name: name, + Value: buildOpenAPI3SchemaOrReferenceForSchema(schema), + }) +} + +func buildOpenAPI3SchemaOrReferenceForSchema(schema *discovery.Schema) *openapi3.SchemaOrReference { + if ref := schema.XRef; ref != "" { + return &openapi3.SchemaOrReference{ + Oneof: &openapi3.SchemaOrReference_Reference{ + Reference: &openapi3.Reference{ + XRef: "#/definitions/" + ref, + }, + }, + } + } + + s := &openapi3.Schema{} + + if description := schema.Description; description != "" { + s.Description = description + } + if typeName := schema.Type; typeName != "" { + s.Type = typeName + } + if len(schema.Enum) > 0 { + for _, e := range schema.Enum { + s.Enum = append(s.Enum, &openapi3.Any{Yaml: e}) + } + } + if schema.Items != nil { + s2 := buildOpenAPI3SchemaOrReferenceForSchema(schema.Items) + s.Items = &openapi3.ItemsItem{} + s.Items.SchemaOrReference = append(s.Items.SchemaOrReference, s2) + } + if (schema.Properties != nil) && (len(schema.Properties.AdditionalProperties) > 0) { + s.Properties = &openapi3.Properties{} + for _, pair := range schema.Properties.AdditionalProperties { + s.Properties.AdditionalProperties = append(s.Properties.AdditionalProperties, + &openapi3.NamedSchemaOrReference{ + Name: pair.Name, + Value: buildOpenAPI3SchemaOrReferenceForSchema(pair.Value), + }, + ) + } + } + return &openapi3.SchemaOrReference{ + Oneof: &openapi3.SchemaOrReference_Schema{ + Schema: s, + }, + } +} + +func buildOpenAPI3ParameterForParameter(name string, p *discovery.Parameter) *openapi3.Parameter { + typeName := p.Type + format := p.Format + location := p.Location + 
switch location { + case "query", "path": + return &openapi3.Parameter{ + Name: name, + In: location, + Description: p.Description, + Required: p.Required, + Schema: &openapi3.SchemaOrReference{ + Oneof: &openapi3.SchemaOrReference_Schema{ + Schema: &openapi3.Schema{ + Type: typeName, + Format: format, + }, + }, + }, + } + default: + return nil + } +} + +func buildOpenAPI3RequestBodyForRequest(request *discovery.Request) *openapi3.RequestBody { + ref := request.XRef + if ref == "" { + log.Printf("WARNING: Unhandled request schema %+v", request) + } + return &openapi3.RequestBody{ + Content: &openapi3.MediaTypes{ + AdditionalProperties: []*openapi3.NamedMediaType{ + &openapi3.NamedMediaType{ + Name: "application/json", + Value: &openapi3.MediaType{ + Schema: &openapi3.SchemaOrReference{ + Oneof: &openapi3.SchemaOrReference_Reference{ + Reference: &openapi3.Reference{ + XRef: "#/definitions/" + ref, + }, + }, + }, + }, + }, + }, + }, + } +} + +func buildOpenAPI3ResponseForResponse(response *discovery.Response, hasDataWrapper bool) *openapi3.Response { + if response == nil { + return &openapi3.Response{ + Description: "Successful operation", + } + } else { + ref := response.XRef + if ref == "" { + log.Printf("WARNING: Unhandled response %+v", response) + } + return &openapi3.Response{ + Description: "Successful operation", + Content: &openapi3.MediaTypes{ + AdditionalProperties: []*openapi3.NamedMediaType{ + &openapi3.NamedMediaType{ + Name: "application/json", + Value: &openapi3.MediaType{ + Schema: &openapi3.SchemaOrReference{ + Oneof: &openapi3.SchemaOrReference_Reference{ + Reference: &openapi3.Reference{ + XRef: "#/definitions/" + ref, + }, + }, + }, + }, + }, + }, + }, + } + } +} + +func buildOpenAPI3OperationForMethod(method *discovery.Method, hasDataWrapper bool) *openapi3.Operation { + if method == nil { + return nil + } + parameters := make([]*openapi3.ParameterOrReference, 0) + if method.Parameters != nil { + for _, pair := range method.Parameters.AdditionalProperties { + parameters = append(parameters, &openapi3.ParameterOrReference{ + Oneof: &openapi3.ParameterOrReference_Parameter{ + Parameter: buildOpenAPI3ParameterForParameter(pair.Name, pair.Value), + }, + }) + } + } + responses := &openapi3.Responses{ + ResponseOrReference: []*openapi3.NamedResponseOrReference{ + &openapi3.NamedResponseOrReference{ + Name: "default", + Value: &openapi3.ResponseOrReference{ + Oneof: &openapi3.ResponseOrReference_Response{ + Response: buildOpenAPI3ResponseForResponse(method.Response, hasDataWrapper), + }, + }, + }, + }, + } + var requestBodyOrReference *openapi3.RequestBodyOrReference + if method.Request != nil { + requestBody := buildOpenAPI3RequestBodyForRequest(method.Request) + requestBodyOrReference = &openapi3.RequestBodyOrReference{ + Oneof: &openapi3.RequestBodyOrReference_RequestBody{ + RequestBody: requestBody, + }, + } + } + return &openapi3.Operation{ + Description: method.Description, + OperationId: method.Id, + Parameters: parameters, + Responses: responses, + RequestBody: requestBodyOrReference, + } +} + +func getOpenAPI3PathItemForPath(d *openapi3.Document, path string) *openapi3.PathItem { + // First, try to find a path item with the specified path. If it exists, return it. + for _, item := range d.Paths.Path { + if item.Name == path { + return item.Value + } + } + // Otherwise, create and return a new path item. 
+ pathItem := &openapi3.PathItem{} + d.Paths.Path = append(d.Paths.Path, + &openapi3.NamedPathItem{ + Name: path, + Value: pathItem, + }, + ) + return pathItem +} + +func addOpenAPI3PathsForMethod(d *openapi3.Document, name string, method *discovery.Method, hasDataWrapper bool) { + operation := buildOpenAPI3OperationForMethod(method, hasDataWrapper) + pathItem := getOpenAPI3PathItemForPath(d, pathForMethod(method.Path)) + switch method.HttpMethod { + case "GET": + pathItem.Get = operation + case "POST": + pathItem.Post = operation + case "PUT": + pathItem.Put = operation + case "DELETE": + pathItem.Delete = operation + case "PATCH": + pathItem.Patch = operation + default: + log.Printf("WARNING: Unknown HTTP method %s", method.HttpMethod) + } +} + +func addOpenAPI3PathsForResource(d *openapi3.Document, resource *discovery.Resource, hasDataWrapper bool) { + if resource.Methods != nil { + for _, pair := range resource.Methods.AdditionalProperties { + addOpenAPI3PathsForMethod(d, pair.Name, pair.Value, hasDataWrapper) + } + } + if resource.Resources != nil { + for _, pair := range resource.Resources.AdditionalProperties { + addOpenAPI3PathsForResource(d, pair.Value, hasDataWrapper) + } + } +} + +// OpenAPIv3 returns an OpenAPI v3 representation of a Discovery document +func OpenAPIv3(api *discovery.Document) (*openapi3.Document, error) { + d := &openapi3.Document{} + d.Openapi = "3.0" + d.Info = &openapi3.Info{ + Title: api.Title, + Version: api.Version, + Description: api.Description, + } + d.Servers = make([]*openapi3.Server, 0) + + url, _ := url.Parse(api.RootUrl) + host := url.Host + basePath := api.BasePath + if basePath == "" { + basePath = "/" + } + d.Servers = append(d.Servers, &openapi3.Server{Url: "https://" + host + basePath}) + + hasDataWrapper := false + for _, feature := range api.Features { + if feature == "dataWrapper" { + hasDataWrapper = true + } + } + + d.Components = &openapi3.Components{} + d.Components.Schemas = &openapi3.SchemasOrReferences{} + for _, pair := range api.Schemas.AdditionalProperties { + addOpenAPI3SchemaForSchema(d, pair.Name, pair.Value) + } + + d.Paths = &openapi3.Paths{} + if api.Methods != nil { + for _, pair := range api.Methods.AdditionalProperties { + addOpenAPI3PathsForMethod(d, pair.Name, pair.Value, hasDataWrapper) + } + } + for _, pair := range api.Resources.AdditionalProperties { + addOpenAPI3PathsForResource(d, pair.Value, hasDataWrapper) + } + + return d, nil +} diff --git a/vendor/github.com/googleapis/gnostic/apps/petstore-builder/README.md b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/README.md new file mode 100644 index 000000000..12eebb628 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/README.md @@ -0,0 +1,5 @@ +# OpenAPI Builder Sample + +This directory contains a simple sample application that builds +and exports an OpenAPI 2.0 description of a sample API. + diff --git a/vendor/github.com/googleapis/gnostic/apps/petstore-builder/main.go b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/main.go new file mode 100644 index 000000000..8a3eb8cdd --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/main.go @@ -0,0 +1,82 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "io/ioutil" + "os" + "path" + + "github.com/golang/protobuf/proto" +) + +func usage() string { + return fmt.Sprintf(` +Usage: %s [OPTIONS] +Options: + --v2 + Generate an OpenAPI v2 description. + --v3 + Generate an OpenAPI v3 description. +`, path.Base(os.Args[0])) +} + +func main() { + openAPIv2 := false + openAPIv3 := false + + for i, arg := range os.Args { + if i == 0 { + continue // skip the tool name + } + if arg == "--v2" { + openAPIv2 = true + } else if arg == "--v3" { + openAPIv3 = true + } else { + fmt.Printf("Unknown option: %s.\n%s\n", arg, usage()) + os.Exit(-1) + } + } + + if !openAPIv2 && !openAPIv3 { + openAPIv2 = true + } + + if openAPIv2 { + document := buildDocumentV2() + bytes, err := proto.Marshal(document) + if err != nil { + panic(err) + } + err = ioutil.WriteFile("petstore-v2.pb", bytes, 0644) + if err != nil { + panic(err) + } + } + + if openAPIv3 { + document := buildDocumentV3() + bytes, err := proto.Marshal(document) + if err != nil { + panic(err) + } + err = ioutil.WriteFile("petstore-v3.pb", bytes, 0644) + if err != nil { + panic(err) + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/apps/petstore-builder/petstore-v2.go b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/petstore-v2.go new file mode 100644 index 000000000..5531d35ca --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/petstore-v2.go @@ -0,0 +1,260 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
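The builder above writes petstore-v2.pb and petstore-v3.pb with proto.Marshal; the same generated types can read those files back, as apps/report does later in this diff. A minimal round-trip check for the v2 output (a sketch, not part of the vendored code) could look like:

    // Sketch: load petstore-v2.pb into the generated OpenAPI v2 structs.
    package main

    import (
        "fmt"
        "io/ioutil"
        "log"

        "github.com/golang/protobuf/proto"
        v2 "github.com/googleapis/gnostic/OpenAPIv2"
    )

    func main() {
        data, err := ioutil.ReadFile("petstore-v2.pb")
        if err != nil {
            log.Fatalf("read: %v", err)
        }
        document := &v2.Document{}
        if err := proto.Unmarshal(data, document); err != nil {
            log.Fatalf("unmarshal: %v", err)
        }
        // For the document built below, this prints "2.0 Swagger Petstore".
        fmt.Println(document.Swagger, document.Info.Title)
    }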
+ +package main + +import ( + v2 "github.com/googleapis/gnostic/OpenAPIv2" +) + +func buildDocumentV2() *v2.Document { + d := &v2.Document{} + d.Swagger = "2.0" + d.Info = &v2.Info{ + Title: "Swagger Petstore", + Version: "1.0.0", + License: &v2.License{Name: "MIT"}, + } + d.Host = "petstore.swagger.io" + d.BasePath = "/v1" + d.Schemes = []string{"http"} + d.Consumes = []string{"application/json"} + d.Produces = []string{"application/json"} + d.Paths = &v2.Paths{} + d.Paths.Path = append(d.Paths.Path, + &v2.NamedPathItem{ + Name: "/pets", + Value: &v2.PathItem{ + Get: &v2.Operation{ + Summary: "List all pets", + OperationId: "listPets", + Tags: []string{"pets"}, + Parameters: []*v2.ParametersItem{ + &v2.ParametersItem{ + Oneof: &v2.ParametersItem_Parameter{ + Parameter: &v2.Parameter{ + Oneof: &v2.Parameter_NonBodyParameter{ + NonBodyParameter: &v2.NonBodyParameter{ + Oneof: &v2.NonBodyParameter_QueryParameterSubSchema{ + QueryParameterSubSchema: &v2.QueryParameterSubSchema{ + Name: "limit", + In: "query", + Description: "How many items to return at one time (max 100)", + Required: false, + Type: "integer", + Format: "int32", + }, + }, + }, + }, + }, + }, + }, + }, + Responses: &v2.Responses{ + ResponseCode: []*v2.NamedResponseValue{ + &v2.NamedResponseValue{ + Name: "200", + Value: &v2.ResponseValue{ + Oneof: &v2.ResponseValue_Response{ + Response: &v2.Response{ + Description: "An paged array of pets", // [sic] match other examples + Schema: &v2.SchemaItem{ + Oneof: &v2.SchemaItem_Schema{ + Schema: &v2.Schema{ + XRef: "#/definitions/Pets", + }, + }, + }, + Headers: &v2.Headers{ + AdditionalProperties: []*v2.NamedHeader{ + &v2.NamedHeader{ + Name: "x-next", + Value: &v2.Header{ + Type: "string", + Description: "A link to the next page of responses", + }, + }, + }, + }, + }, + }, + }, + }, + &v2.NamedResponseValue{ + Name: "default", + Value: &v2.ResponseValue{ + Oneof: &v2.ResponseValue_Response{ + Response: &v2.Response{ + Description: "unexpected error", + Schema: &v2.SchemaItem{ + Oneof: &v2.SchemaItem_Schema{ + Schema: &v2.Schema{ + XRef: "#/definitions/Error", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + Post: &v2.Operation{ + Summary: "Create a pet", + OperationId: "createPets", + Tags: []string{"pets"}, + Parameters: []*v2.ParametersItem{}, + Responses: &v2.Responses{ + ResponseCode: []*v2.NamedResponseValue{ + &v2.NamedResponseValue{ + Name: "201", + Value: &v2.ResponseValue{ + Oneof: &v2.ResponseValue_Response{ + Response: &v2.Response{ + Description: "Null response", + }, + }, + }, + }, + &v2.NamedResponseValue{ + Name: "default", + Value: &v2.ResponseValue{ + Oneof: &v2.ResponseValue_Response{ + Response: &v2.Response{ + Description: "unexpected error", + Schema: &v2.SchemaItem{ + Oneof: &v2.SchemaItem_Schema{ + Schema: &v2.Schema{ + XRef: "#/definitions/Error", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }}) + d.Paths.Path = append(d.Paths.Path, + &v2.NamedPathItem{ + Name: "/pets/{petId}", + Value: &v2.PathItem{ + Get: &v2.Operation{ + Summary: "Info for a specific pet", + OperationId: "showPetById", + Tags: []string{"pets"}, + Parameters: []*v2.ParametersItem{ + &v2.ParametersItem{ + Oneof: &v2.ParametersItem_Parameter{ + Parameter: &v2.Parameter{ + Oneof: &v2.Parameter_NonBodyParameter{ + NonBodyParameter: &v2.NonBodyParameter{ + Oneof: &v2.NonBodyParameter_PathParameterSubSchema{ + PathParameterSubSchema: &v2.PathParameterSubSchema{ + Name: "petId", + In: "path", + Description: "The id of the pet to retrieve", + Required: true, + Type: "string", + }, + }, + 
}, + }, + }, + }, + }, + }, + Responses: &v2.Responses{ + ResponseCode: []*v2.NamedResponseValue{ + &v2.NamedResponseValue{ + Name: "200", + Value: &v2.ResponseValue{ + Oneof: &v2.ResponseValue_Response{ + Response: &v2.Response{ + Description: "Expected response to a valid request", + Schema: &v2.SchemaItem{ + Oneof: &v2.SchemaItem_Schema{ + Schema: &v2.Schema{ + XRef: "#/definitions/Pets", + }, + }, + }, + }, + }, + }, + }, + &v2.NamedResponseValue{ + Name: "default", + Value: &v2.ResponseValue{ + Oneof: &v2.ResponseValue_Response{ + Response: &v2.Response{ + Description: "unexpected error", + Schema: &v2.SchemaItem{ + Oneof: &v2.SchemaItem_Schema{ + Schema: &v2.Schema{ + XRef: "#/definitions/Error", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }}) + d.Definitions = &v2.Definitions{} + d.Definitions.AdditionalProperties = append(d.Definitions.AdditionalProperties, + &v2.NamedSchema{ + Name: "Pet", + Value: &v2.Schema{ + Required: []string{"id", "name"}, + Properties: &v2.Properties{ + AdditionalProperties: []*v2.NamedSchema{ + &v2.NamedSchema{Name: "id", Value: &v2.Schema{ + Type: &v2.TypeItem{[]string{"integer"}}, + Format: "int64"}}, + &v2.NamedSchema{Name: "name", Value: &v2.Schema{Type: &v2.TypeItem{[]string{"string"}}}}, + &v2.NamedSchema{Name: "tag", Value: &v2.Schema{Type: &v2.TypeItem{[]string{"string"}}}}, + }, + }, + }}) + d.Definitions.AdditionalProperties = append(d.Definitions.AdditionalProperties, + &v2.NamedSchema{ + Name: "Pets", + Value: &v2.Schema{ + Type: &v2.TypeItem{[]string{"array"}}, + Items: &v2.ItemsItem{[]*v2.Schema{&v2.Schema{XRef: "#/definitions/Pet"}}}, + }}) + d.Definitions.AdditionalProperties = append(d.Definitions.AdditionalProperties, + &v2.NamedSchema{ + Name: "Error", + Value: &v2.Schema{ + Required: []string{"code", "message"}, + Properties: &v2.Properties{ + AdditionalProperties: []*v2.NamedSchema{ + &v2.NamedSchema{Name: "code", Value: &v2.Schema{ + Type: &v2.TypeItem{[]string{"integer"}}, + Format: "int32"}}, + &v2.NamedSchema{Name: "message", Value: &v2.Schema{Type: &v2.TypeItem{[]string{"string"}}}}, + }, + }, + }}) + return d +} diff --git a/vendor/github.com/googleapis/gnostic/apps/petstore-builder/petstore-v3.go b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/petstore-v3.go new file mode 100644 index 000000000..b5ea0e2ed --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/petstore-builder/petstore-v3.go @@ -0,0 +1,369 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
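The generated v3 types model every union as an explicit `Oneof` wrapper struct, which is why the hand-built petstore document that follows is so deeply nested. Hypothetical helpers like these (not part of the vendored file) show the two wrappers used most often below:

    // Hypothetical convenience constructors for the oneof wrappers
    // used throughout petstore-v3.go.
    func schemaRef(ref string) *v3.SchemaOrReference {
        return &v3.SchemaOrReference{
            Oneof: &v3.SchemaOrReference_Reference{
                Reference: &v3.Reference{XRef: ref},
            },
        }
    }

    func schemaOf(typeName, format string) *v3.SchemaOrReference {
        return &v3.SchemaOrReference{
            Oneof: &v3.SchemaOrReference_Schema{
                Schema: &v3.Schema{Type: typeName, Format: format},
            },
        }
    }

    // e.g. schemaRef("#/components/schemas/Pet"), schemaOf("integer", "int32")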
+ +package main + +import ( + v3 "github.com/googleapis/gnostic/OpenAPIv3" +) + +func buildDocumentV3() *v3.Document { + d := &v3.Document{} + d.Openapi = "3.0" + d.Info = &v3.Info{ + Title: "OpenAPI Petstore", + Version: "1.0.0", + License: &v3.License{Name: "MIT"}, + } + d.Servers = append(d.Servers, &v3.Server{ + Url: "https://petstore.openapis.org/v1", + Description: "Development server", + }) + d.Paths = &v3.Paths{} + d.Paths.Path = append(d.Paths.Path, + &v3.NamedPathItem{ + Name: "/pets", + Value: &v3.PathItem{ + Get: &v3.Operation{ + Summary: "List all pets", + OperationId: "listPets", + Tags: []string{"pets"}, + Parameters: []*v3.ParameterOrReference{ + &v3.ParameterOrReference{ + Oneof: &v3.ParameterOrReference_Parameter{ + Parameter: &v3.Parameter{ + Name: "limit", + In: "query", + Description: "How many items to return at one time (max 100)", + Required: false, + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "integer", + Format: "int32", + }, + }, + }, + }, + }, + }, + }, + Responses: &v3.Responses{ + Default: &v3.ResponseOrReference{ + Oneof: &v3.ResponseOrReference_Response{ + Response: &v3.Response{ + Description: "unexpected error", + Content: &v3.MediaTypes{ + AdditionalProperties: []*v3.NamedMediaType{ + &v3.NamedMediaType{ + Name: "application/json", + Value: &v3.MediaType{ + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Reference{ + Reference: &v3.Reference{ + XRef: "#/components/schemas/Error", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + ResponseOrReference: []*v3.NamedResponseOrReference{ + &v3.NamedResponseOrReference{ + Name: "200", + Value: &v3.ResponseOrReference{ + Oneof: &v3.ResponseOrReference_Response{ + Response: &v3.Response{ + Description: "An paged array of pets", // [sic] match other examples + Content: &v3.MediaTypes{ + AdditionalProperties: []*v3.NamedMediaType{ + &v3.NamedMediaType{ + Name: "application/json", + Value: &v3.MediaType{ + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Reference{ + Reference: &v3.Reference{ + XRef: "#/components/schemas/Pets", + }, + }, + }, + }, + }, + }, + }, + Headers: &v3.HeadersOrReferences{ + AdditionalProperties: []*v3.NamedHeaderOrReference{ + &v3.NamedHeaderOrReference{ + Name: "x-next", + Value: &v3.HeaderOrReference{ + Oneof: &v3.HeaderOrReference_Header{ + Header: &v3.Header{ + Description: "A link to the next page of responses", + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "string", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + Post: &v3.Operation{ + Summary: "Create a pet", + OperationId: "createPets", + Tags: []string{"pets"}, + Responses: &v3.Responses{ + Default: &v3.ResponseOrReference{ + Oneof: &v3.ResponseOrReference_Response{ + Response: &v3.Response{ + Description: "unexpected error", + Content: &v3.MediaTypes{ + AdditionalProperties: []*v3.NamedMediaType{ + &v3.NamedMediaType{ + Name: "application/json", + Value: &v3.MediaType{ + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Reference{ + Reference: &v3.Reference{ + XRef: "#/components/schemas/Error", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + ResponseOrReference: []*v3.NamedResponseOrReference{ + &v3.NamedResponseOrReference{ + Name: "201", + Value: &v3.ResponseOrReference{ + Oneof: &v3.ResponseOrReference_Response{ + Response: &v3.Response{ + Description: "Null response", + }, + }, + }, + }, + }, + }, + }, + }}, + &v3.NamedPathItem{ + Name: 
"/pets/{petId}", + Value: &v3.PathItem{ + Get: &v3.Operation{ + Summary: "Info for a specific pet", + OperationId: "showPetById", + Tags: []string{"pets"}, + Parameters: []*v3.ParameterOrReference{ + &v3.ParameterOrReference{ + Oneof: &v3.ParameterOrReference_Parameter{ + Parameter: &v3.Parameter{ + Name: "petId", + In: "path", + Description: "The id of the pet to retrieve", + Required: true, + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "string", + }, + }, + }, + }, + }, + }, + }, + Responses: &v3.Responses{ + Default: &v3.ResponseOrReference{ + Oneof: &v3.ResponseOrReference_Response{ + Response: &v3.Response{ + Description: "unexpected error", + Content: &v3.MediaTypes{ + AdditionalProperties: []*v3.NamedMediaType{ + &v3.NamedMediaType{ + Name: "application/json", + Value: &v3.MediaType{ + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Reference{ + Reference: &v3.Reference{ + XRef: "#/components/schemas/Error", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + ResponseOrReference: []*v3.NamedResponseOrReference{ + &v3.NamedResponseOrReference{ + Name: "200", + Value: &v3.ResponseOrReference{ + Oneof: &v3.ResponseOrReference_Response{ + Response: &v3.Response{ + Description: "Expected response to a valid request", + Content: &v3.MediaTypes{ + AdditionalProperties: []*v3.NamedMediaType{ + &v3.NamedMediaType{ + Name: "application/json", + Value: &v3.MediaType{ + Schema: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Reference{ + Reference: &v3.Reference{ + XRef: "#/components/schemas/Pets", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }}) + d.Components = &v3.Components{ + Schemas: &v3.SchemasOrReferences{ + AdditionalProperties: []*v3.NamedSchemaOrReference{ + &v3.NamedSchemaOrReference{ + Name: "Pet", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Required: []string{"id", "name"}, + Properties: &v3.Properties{ + AdditionalProperties: []*v3.NamedSchemaOrReference{ + &v3.NamedSchemaOrReference{ + Name: "id", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "integer", + Format: "int64", + }, + }, + }, + }, + &v3.NamedSchemaOrReference{ + Name: "name", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "string", + }, + }, + }, + }, + &v3.NamedSchemaOrReference{ + Name: "tag", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "string", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + &v3.NamedSchemaOrReference{ + Name: "Pets", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "array", + Items: &v3.ItemsItem{ + SchemaOrReference: []*v3.SchemaOrReference{ + &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Reference{ + Reference: &v3.Reference{ + XRef: "#/components/schemas/Pet", + }, + }, + }, + }, + }, + }, + }, + }, + }, + &v3.NamedSchemaOrReference{ + Name: "Error", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Required: []string{"code", "message"}, + Properties: &v3.Properties{ + AdditionalProperties: []*v3.NamedSchemaOrReference{ + &v3.NamedSchemaOrReference{ + Name: "code", + Value: &v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "integer", + Format: "int32", + }, + }, + }, + }, + &v3.NamedSchemaOrReference{ + Name: "message", + Value: 
&v3.SchemaOrReference{ + Oneof: &v3.SchemaOrReference_Schema{ + Schema: &v3.Schema{ + Type: "string", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + return d +} diff --git a/vendor/github.com/googleapis/gnostic/apps/report/README.md b/vendor/github.com/googleapis/gnostic/apps/report/README.md new file mode 100644 index 000000000..64fcd792d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/report/README.md @@ -0,0 +1,6 @@ +# OpenAPI Report Sample + +This directory contains a simple sample application that reads a binary +protocol buffer representation of an OpenAPI 2.0 specification that +was generated by gnostic. + diff --git a/vendor/github.com/googleapis/gnostic/apps/report/main.go b/vendor/github.com/googleapis/gnostic/apps/report/main.go new file mode 100644 index 000000000..d2ed3f96f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/apps/report/main.go @@ -0,0 +1,239 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// report is a demo application that displays information about an +// OpenAPI description. +package main + +import ( + "flag" + "fmt" + "io/ioutil" + "os" + + "github.com/golang/protobuf/proto" + "github.com/googleapis/gnostic/printer" + + pb "github.com/googleapis/gnostic/OpenAPIv2" +) + +func readDocumentFromFileWithName(filename string) *pb.Document { + data, err := ioutil.ReadFile(filename) + if err != nil { + fmt.Printf("File error: %v\n", err) + os.Exit(1) + } + document := &pb.Document{} + err = proto.Unmarshal(data, document) + if err != nil { + panic(err) + } + return document +} + +func printDocument(code *printer.Code, document *pb.Document) { + code.Print("BasePath: %+v", document.BasePath) + code.Print("Consumes: %+v", document.Consumes) + code.Print("Definitions:") + code.Indent() + if document.Definitions != nil && document.Definitions.AdditionalProperties != nil { + for _, pair := range document.Definitions.AdditionalProperties { + code.Print("%s", pair.Name) + code.Indent() + printSchema(code, pair.Value) + code.Outdent() + } + } + code.Outdent() + code.Print("ExternalDocs: %+v", document.ExternalDocs) + code.Print("Host: %+v", document.Host) + if document.Info != nil { + code.Print("Info:") + code.Indent() + code.Print("Title: %s", document.Info.Title) + code.Print("Description: %s", document.Info.Description) + code.Print("Version: %s", document.Info.Version) + code.Print("TermsOfService: %s", document.Info.TermsOfService) + if document.Info.Contact != nil { + code.Print("Contact Email: %s", document.Info.Contact.Email) + } + if document.Info.License != nil { + code.Print("License Name: %s", document.Info.License.Name) + code.Print("License URL: %s", document.Info.License.Url) + } + code.Outdent() + } + code.Print("Parameters: %+v", document.Parameters) + code.Print("Paths:") + code.Indent() + for _, pair := range document.Paths.Path { + code.Print("%+v", pair.Name) + code.Indent() + v := pair.Value + if v.Get != nil { + code.Print("GET") + 
code.Indent() + printOperation(code, v.Get) + code.Outdent() + } + if v.Post != nil { + code.Print("POST") + code.Indent() + printOperation(code, v.Post) + code.Outdent() + } + code.Outdent() + } + code.Outdent() + code.Print("Produces: %+v", document.Produces) + code.Print("Responses: %+v", document.Responses) + code.Print("Schemes: %+v", document.Schemes) + code.Print("Security: %+v", document.Security) + if document.SecurityDefinitions != nil { + code.Print("SecurityDefinitions:") + code.Indent() + for _, pair := range document.SecurityDefinitions.AdditionalProperties { + code.Print("%s", pair.Name) + code.Indent() + v := pair.Value + switch t := v.Oneof.(type) { + default: + code.Print("unexpected type %T", t) // %T prints whatever type t has + case *pb.SecurityDefinitionsItem_ApiKeySecurity: + code.Print("ApiKeySecurity: %+v", t) + case *pb.SecurityDefinitionsItem_BasicAuthenticationSecurity: + code.Print("BasicAuthenticationSecurity: %+v", t) + case *pb.SecurityDefinitionsItem_Oauth2AccessCodeSecurity: + code.Print("Oauth2AccessCodeSecurity: %+v", t) + case *pb.SecurityDefinitionsItem_Oauth2ApplicationSecurity: + code.Print("Oauth2ApplicationSecurity: %+v", t) + case *pb.SecurityDefinitionsItem_Oauth2ImplicitSecurity: + code.Print("Oauth2ImplicitSecurity") + code.Indent() + code.Print("AuthorizationUrl: %+v", t.Oauth2ImplicitSecurity.AuthorizationUrl) + code.Print("Flow: %+v", t.Oauth2ImplicitSecurity.Flow) + code.Print("Scopes:") + code.Indent() + for _, pair := range t.Oauth2ImplicitSecurity.Scopes.AdditionalProperties { + code.Print("%s -> %s", pair.Name, pair.Value) + } + code.Outdent() + code.Outdent() + case *pb.SecurityDefinitionsItem_Oauth2PasswordSecurity: + code.Print("Oauth2PasswordSecurity: %+v", t) + } + code.Outdent() + } + code.Outdent() + } + code.Print("Swagger: %+v", document.Swagger) + code.Print("Tags:") + code.Indent() + for _, tag := range document.Tags { + code.Print("Tag:") + code.Indent() + code.Print("Name: %s", tag.Name) + code.Print("Description: %s", tag.Description) + code.Print("ExternalDocs: %s", tag.ExternalDocs) + printVendorExtension(code, tag.VendorExtension) + code.Outdent() + } + code.Outdent() +} + +func printOperation(code *printer.Code, operation *pb.Operation) { + code.Print("Consumes: %+v", operation.Consumes) + code.Print("Deprecated: %+v", operation.Deprecated) + code.Print("Description: %+v", operation.Description) + code.Print("ExternalDocs: %+v", operation.ExternalDocs) + code.Print("OperationId: %+v", operation.OperationId) + code.Print("Parameters:") + code.Indent() + for _, item := range operation.Parameters { + switch t := item.Oneof.(type) { + default: + code.Print("unexpected type %T", t) // %T prints whatever type t has + case *pb.ParametersItem_JsonReference: + code.Print("JsonReference: %+v", t) + case *pb.ParametersItem_Parameter: + code.Print("Parameter: %+v", t) + } + } + code.Outdent() + code.Print("Produces: %+v", operation.Produces) + code.Print("Responses:") + code.Indent() + code.Print("ResponseCode:") + code.Indent() + for _, pair := range operation.Responses.ResponseCode { + code.Print("%s %s", pair.Name, pair.Value) + } + code.Outdent() + printVendorExtension(code, operation.Responses.VendorExtension) + code.Outdent() + code.Print("Schemes: %+v", operation.Schemes) + code.Print("Security: %+v", operation.Security) + code.Print("Summary: %+v", operation.Summary) + code.Print("Tags: %+v", operation.Tags) + printVendorExtension(code, operation.VendorExtension) +} + +func printSchema(code *printer.Code, schema 
*pb.Schema) { + //code.Print("%+v", schema) + if schema.Format != "" { + code.Print("Format: %+v", schema.Format) + } + if schema.Properties != nil { + code.Print("Properties") + code.Indent() + for _, pair := range schema.Properties.AdditionalProperties { + code.Print("%s", pair.Name) + code.Indent() + printSchema(code, pair.Value) + code.Outdent() + } + code.Outdent() + } + if schema.Type != nil { + code.Print("Type: %+v", schema.Type) + } + if schema.Xml != nil { + code.Print("Xml: %+v", schema.Xml) + } + printVendorExtension(code, schema.VendorExtension) +} + +func printVendorExtension(code *printer.Code, vendorExtension []*pb.NamedAny) { + if len(vendorExtension) > 0 { + code.Print("VendorExtension: %+v", vendorExtension) + } +} + +func main() { + flag.Parse() + args := flag.Args() + + if len(args) != 1 { + fmt.Printf("Usage: report \n") + return + } + + document := readDocumentFromFileWithName(args[0]) + + code := &printer.Code{} + code.Print("API REPORT") + code.Print("----------") + printDocument(code, document) + fmt.Printf("%s", code) +} diff --git a/vendor/github.com/googleapis/gnostic/compiler/README.md b/vendor/github.com/googleapis/gnostic/compiler/README.md new file mode 100644 index 000000000..848b16c69 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/README.md @@ -0,0 +1,3 @@ +# Compiler support code + +This directory contains compiler support code used by Gnostic and Gnostic extensions. \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/compiler/context.go b/vendor/github.com/googleapis/gnostic/compiler/context.go new file mode 100644 index 000000000..a64c1b75d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/context.go @@ -0,0 +1,43 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +// Context contains state of the compiler as it traverses a document. +type Context struct { + Parent *Context + Name string + ExtensionHandlers *[]ExtensionHandler +} + +// NewContextWithExtensions returns a new object representing the compiler state +func NewContextWithExtensions(name string, parent *Context, extensionHandlers *[]ExtensionHandler) *Context { + return &Context{Name: name, Parent: parent, ExtensionHandlers: extensionHandlers} +} + +// NewContext returns a new object representing the compiler state +func NewContext(name string, parent *Context) *Context { + if parent != nil { + return &Context{Name: name, Parent: parent, ExtensionHandlers: parent.ExtensionHandlers} + } + return &Context{Name: name, Parent: parent, ExtensionHandlers: nil} +} + +// Description returns a text description of the compiler state +func (context *Context) Description() string { + if context.Parent != nil { + return context.Parent.Description() + "." 
+ context.Name + } + return context.Name +} diff --git a/vendor/github.com/googleapis/gnostic/compiler/error.go b/vendor/github.com/googleapis/gnostic/compiler/error.go new file mode 100644 index 000000000..d8672c100 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/error.go @@ -0,0 +1,61 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +// Error represents compiler errors and their location in the document. +type Error struct { + Context *Context + Message string +} + +// NewError creates an Error. +func NewError(context *Context, message string) *Error { + return &Error{Context: context, Message: message} +} + +// Error returns the string value of an Error. +func (err *Error) Error() string { + if err.Context == nil { + return "ERROR " + err.Message + } + return "ERROR " + err.Context.Description() + " " + err.Message +} + +// ErrorGroup is a container for groups of Error values. +type ErrorGroup struct { + Errors []error +} + +// NewErrorGroupOrNil returns a new ErrorGroup for a slice of errors or nil if the slice is empty. +func NewErrorGroupOrNil(errors []error) error { + if len(errors) == 0 { + return nil + } else if len(errors) == 1 { + return errors[0] + } else { + return &ErrorGroup{Errors: errors} + } +} + +func (group *ErrorGroup) Error() string { + result := "" + for i, err := range group.Errors { + if i > 0 { + result += "\n" + } + result += err.Error() + } + return result +} diff --git a/vendor/github.com/googleapis/gnostic/compiler/extension-handler.go b/vendor/github.com/googleapis/gnostic/compiler/extension-handler.go new file mode 100644 index 000000000..1f85b650e --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/extension-handler.go @@ -0,0 +1,101 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "bytes" + "fmt" + "os/exec" + + "strings" + + "errors" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes/any" + ext_plugin "github.com/googleapis/gnostic/extensions" + yaml "gopkg.in/yaml.v2" +) + +// ExtensionHandler describes a binary that is called by the compiler to handle specification extensions. +type ExtensionHandler struct { + Name string +} + +// HandleExtension calls a binary extension handler. 
+func HandleExtension(context *Context, in interface{}, extensionName string) (bool, *any.Any, error) { + handled := false + var errFromPlugin error + var outFromPlugin *any.Any + + if context != nil && context.ExtensionHandlers != nil && len(*(context.ExtensionHandlers)) != 0 { + for _, customAnyProtoGenerator := range *(context.ExtensionHandlers) { + outFromPlugin, errFromPlugin = customAnyProtoGenerator.handle(in, extensionName) + if outFromPlugin == nil { + continue + } else { + handled = true + break + } + } + } + return handled, outFromPlugin, errFromPlugin +} + +func (extensionHandlers *ExtensionHandler) handle(in interface{}, extensionName string) (*any.Any, error) { + if extensionHandlers.Name != "" { + binary, _ := yaml.Marshal(in) + + request := &ext_plugin.ExtensionHandlerRequest{} + + version := &ext_plugin.Version{} + version.Major = 0 + version.Minor = 1 + version.Patch = 0 + request.CompilerVersion = version + + request.Wrapper = &ext_plugin.Wrapper{} + + request.Wrapper.Version = "v2" + request.Wrapper.Yaml = string(binary) + request.Wrapper.ExtensionName = extensionName + + requestBytes, _ := proto.Marshal(request) + cmd := exec.Command(extensionHandlers.Name) + cmd.Stdin = bytes.NewReader(requestBytes) + output, err := cmd.Output() + + if err != nil { + fmt.Printf("Error: %+v\n", err) + return nil, err + } + response := &ext_plugin.ExtensionHandlerResponse{} + err = proto.Unmarshal(output, response) + if err != nil { + fmt.Printf("Error: %+v\n", err) + fmt.Printf("%s\n", string(output)) + return nil, err + } + if !response.Handled { + return nil, nil + } + if len(response.Error) != 0 { + message := fmt.Sprintf("Errors when parsing: %+v for field %s by vendor extension handler %s. Details %+v", in, extensionName, extensionHandlers.Name, strings.Join(response.Error, ",")) + return nil, errors.New(message) + } + return response.Value, nil + } + return nil, nil +} diff --git a/vendor/github.com/googleapis/gnostic/compiler/helpers.go b/vendor/github.com/googleapis/gnostic/compiler/helpers.go new file mode 100644 index 000000000..76df635ff --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/helpers.go @@ -0,0 +1,197 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "gopkg.in/yaml.v2" + "regexp" + "sort" + "strconv" +) + +// compiler helper functions, usually called from generated code + +// UnpackMap gets a yaml.MapSlice if possible. +func UnpackMap(in interface{}) (yaml.MapSlice, bool) { + m, ok := in.(yaml.MapSlice) + if ok { + return m, true + } + // do we have an empty array? + a, ok := in.([]interface{}) + if ok && len(a) == 0 { + // if so, return an empty map + return yaml.MapSlice{}, true + } + return nil, false +} + +// SortedKeysForMap returns the sorted keys of a yaml.MapSlice. 
+func SortedKeysForMap(m yaml.MapSlice) []string { + keys := make([]string, 0) + for _, item := range m { + keys = append(keys, item.Key.(string)) + } + sort.Strings(keys) + return keys +} + +// MapHasKey returns true if a yaml.MapSlice contains a specified key. +func MapHasKey(m yaml.MapSlice, key string) bool { + for _, item := range m { + itemKey, ok := item.Key.(string) + if ok && key == itemKey { + return true + } + } + return false +} + +// MapValueForKey gets the value of a map value for a specified key. +func MapValueForKey(m yaml.MapSlice, key string) interface{} { + for _, item := range m { + itemKey, ok := item.Key.(string) + if ok && key == itemKey { + return item.Value + } + } + return nil +} + +// ConvertInterfaceArrayToStringArray converts an array of interfaces to an array of strings, if possible. +func ConvertInterfaceArrayToStringArray(interfaceArray []interface{}) []string { + stringArray := make([]string, 0) + for _, item := range interfaceArray { + v, ok := item.(string) + if ok { + stringArray = append(stringArray, v) + } + } + return stringArray +} + +// MissingKeysInMap identifies which keys from a list of required keys are not in a map. +func MissingKeysInMap(m yaml.MapSlice, requiredKeys []string) []string { + missingKeys := make([]string, 0) + for _, k := range requiredKeys { + if !MapHasKey(m, k) { + missingKeys = append(missingKeys, k) + } + } + return missingKeys +} + +// InvalidKeysInMap returns keys in a map that don't match a list of allowed keys and patterns. +func InvalidKeysInMap(m yaml.MapSlice, allowedKeys []string, allowedPatterns []*regexp.Regexp) []string { + invalidKeys := make([]string, 0) + for _, item := range m { + itemKey, ok := item.Key.(string) + if ok { + key := itemKey + found := false + // does the key match an allowed key? + for _, allowedKey := range allowedKeys { + if key == allowedKey { + found = true + break + } + } + if !found { + // does the key match an allowed pattern? + for _, allowedPattern := range allowedPatterns { + if allowedPattern.MatchString(key) { + found = true + break + } + } + if !found { + invalidKeys = append(invalidKeys, key) + } + } + } + } + return invalidKeys +} + +// DescribeMap describes a map (for debugging purposes). +func DescribeMap(in interface{}, indent string) string { + description := "" + m, ok := in.(map[string]interface{}) + if ok { + keys := make([]string, 0) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + v := m[k] + description += fmt.Sprintf("%s%s:\n", indent, k) + description += DescribeMap(v, indent+" ") + } + return description + } + a, ok := in.([]interface{}) + if ok { + for i, v := range a { + description += fmt.Sprintf("%s%d:\n", indent, i) + description += DescribeMap(v, indent+" ") + } + return description + } + description += fmt.Sprintf("%s%+v\n", indent, in) + return description +} + +// PluralProperties returns the string "properties" pluralized. +func PluralProperties(count int) string { + if count == 1 { + return "property" + } + return "properties" +} + +// StringArrayContainsValue returns true if a string array contains a specified value. +func StringArrayContainsValue(array []string, value string) bool { + for _, item := range array { + if item == value { + return true + } + } + return false +} + +// StringArrayContainsValues returns true if a string array contains all of a list of specified values. 
+func StringArrayContainsValues(array []string, values []string) bool { + for _, value := range values { + if !StringArrayContainsValue(array, value) { + return false + } + } + return true +} + +// StringValue returns the string value of an item. +func StringValue(item interface{}) (value string, ok bool) { + value, ok = item.(string) + if ok { + return value, ok + } + intValue, ok := item.(int) + if ok { + return strconv.Itoa(intValue), true + } + return "", false +} diff --git a/vendor/github.com/googleapis/gnostic/compiler/main.go b/vendor/github.com/googleapis/gnostic/compiler/main.go new file mode 100644 index 000000000..9713a21cc --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/main.go @@ -0,0 +1,16 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package compiler provides support functions to generated compiler code. +package compiler diff --git a/vendor/github.com/googleapis/gnostic/compiler/reader.go b/vendor/github.com/googleapis/gnostic/compiler/reader.go new file mode 100644 index 000000000..2d4b3303d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/compiler/reader.go @@ -0,0 +1,173 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "errors" + "fmt" + "gopkg.in/yaml.v2" + "io/ioutil" + "log" + "net/http" + "net/url" + "path/filepath" + "strings" +) + +var fileCache map[string][]byte +var infoCache map[string]interface{} +var count int64 + +var verboseReader = false + +func initializeFileCache() { + if fileCache == nil { + fileCache = make(map[string][]byte, 0) + } +} + +func initializeInfoCache() { + if infoCache == nil { + infoCache = make(map[string]interface{}, 0) + } +} + +// FetchFile gets a specified file from the local filesystem or a remote location. 
+func FetchFile(fileurl string) ([]byte, error) { + initializeFileCache() + bytes, ok := fileCache[fileurl] + if ok { + if verboseReader { + log.Printf("Cache hit %s", fileurl) + } + return bytes, nil + } + if verboseReader { + log.Printf("Fetching %s", fileurl) + } + response, err := http.Get(fileurl) + if err != nil { + return nil, err + } + if response.StatusCode != 200 { + return nil, errors.New(fmt.Sprintf("Error downloading %s: %s", fileurl, response.Status)) + } + defer response.Body.Close() + bytes, err = ioutil.ReadAll(response.Body) + if err == nil { + fileCache[fileurl] = bytes + } + return bytes, err +} + +// ReadBytesForFile reads the bytes of a file. +func ReadBytesForFile(filename string) ([]byte, error) { + // is the filename a url? + fileurl, _ := url.Parse(filename) + if fileurl.Scheme != "" { + // yes, fetch it + bytes, err := FetchFile(filename) + if err != nil { + return nil, err + } + return bytes, nil + } + // no, it's a local filename + bytes, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + return bytes, nil +} + +// ReadInfoFromBytes unmarshals a file as a yaml.MapSlice. +func ReadInfoFromBytes(filename string, bytes []byte) (interface{}, error) { + initializeInfoCache() + cachedInfo, ok := infoCache[filename] + if ok { + if verboseReader { + log.Printf("Cache hit info for file %s", filename) + } + return cachedInfo, nil + } + if verboseReader { + log.Printf("Reading info for file %s", filename) + } + var info yaml.MapSlice + err := yaml.Unmarshal(bytes, &info) + if err != nil { + return nil, err + } + infoCache[filename] = info + return info, nil +} + +// ReadInfoForRef reads a file and return the fragment needed to resolve a $ref. +func ReadInfoForRef(basefile string, ref string) (interface{}, error) { + initializeInfoCache() + { + info, ok := infoCache[ref] + if ok { + if verboseReader { + log.Printf("Cache hit for ref %s#%s", basefile, ref) + } + return info, nil + } + } + if verboseReader { + log.Printf("Reading info for ref %s#%s", basefile, ref) + } + count = count + 1 + basedir, _ := filepath.Split(basefile) + parts := strings.Split(ref, "#") + var filename string + if parts[0] != "" { + filename = basedir + parts[0] + } else { + filename = basefile + } + bytes, err := ReadBytesForFile(filename) + if err != nil { + return nil, err + } + info, err := ReadInfoFromBytes(filename, bytes) + if err != nil { + log.Printf("File error: %v\n", err) + } else { + if len(parts) > 1 { + path := strings.Split(parts[1], "/") + for i, key := range path { + if i > 0 { + m, ok := info.(yaml.MapSlice) + if ok { + found := false + for _, section := range m { + if section.Key == key { + info = section.Value + found = true + } + } + if !found { + infoCache[ref] = nil + return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) + } + } + } + } + } + } + infoCache[ref] = info + return info, nil +} diff --git a/vendor/github.com/googleapis/gnostic/discovery/README.md b/vendor/github.com/googleapis/gnostic/discovery/README.md new file mode 100644 index 000000000..2237e24b1 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/discovery/README.md @@ -0,0 +1,16 @@ +# API Discovery Format + +This directory contains a Protocol Buffer-language model +and related code for supporting Google's API Discovery Format. + +Gnostic applications and plugins can use Discovery.proto +to generate Protocol Buffer support code for their preferred languages. 
+ +Discovery.go is used by Gnostic to read JSON and YAML Discovery +descriptions into the Protocol Buffer-based datastructures +generated from Discovery.proto. + +Discovery.proto and Discovery.go are generated by the Gnostic +compiler generator, and Discovery.pb.go is generated by +protoc, the Protocol Buffer compiler, and protoc-gen-go, the +Protocol Buffer Go code generation plugin. diff --git a/vendor/github.com/googleapis/gnostic/discovery/discovery.go b/vendor/github.com/googleapis/gnostic/discovery/discovery.go new file mode 100644 index 000000000..6e2788028 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/discovery/discovery.go @@ -0,0 +1,2692 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +package discovery_v1 + +import ( + "fmt" + "github.com/googleapis/gnostic/compiler" + "gopkg.in/yaml.v2" + "regexp" + "strings" +) + +// Version returns the package name (and OpenAPI version). +func Version() string { + return "discovery_v1" +} + +// NewAnnotations creates an object of type Annotations if possible, returning an error if not. +func NewAnnotations(in interface{}, context *compiler.Context) (*Annotations, error) { + errors := make([]error, 0) + x := &Annotations{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"required"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + v, ok := v1.([]interface{}) + if ok { + x.Required = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAny creates an object of type Any if possible, returning an error if not. +func NewAny(in interface{}, context *compiler.Context) (*Any, error) { + errors := make([]error, 0) + x := &Any{} + bytes, _ := yaml.Marshal(in) + x.Yaml = string(bytes) + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAuth creates an object of type Auth if possible, returning an error if not. 
+func NewAuth(in interface{}, context *compiler.Context) (*Auth, error) { + errors := make([]error, 0) + x := &Auth{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"oauth2"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // Oauth2 oauth2 = 1; + v1 := compiler.MapValueForKey(m, "oauth2") + if v1 != nil { + var err error + x.Oauth2, err = NewOauth2(v1, compiler.NewContext("oauth2", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDocument creates an object of type Document if possible, returning an error if not. +func NewDocument(in interface{}, context *compiler.Context) (*Document, error) { + errors := make([]error, 0) + x := &Document{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"discoveryVersion", "kind"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"auth", "basePath", "baseUrl", "batchPath", "canonicalName", "description", "discoveryVersion", "documentationLink", "etag", "features", "fullyEncodeReservedExpansion", "icons", "id", "kind", "labels", "methods", "name", "ownerDomain", "ownerName", "packagePath", "parameters", "protocol", "resources", "revision", "rootUrl", "schemas", "servicePath", "title", "version", "version_module"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string kind = 1; + v1 := compiler.MapValueForKey(m, "kind") + if v1 != nil { + x.Kind, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for kind: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string discovery_version = 2; + v2 := compiler.MapValueForKey(m, "discoveryVersion") + if v2 != nil { + x.DiscoveryVersion, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for discoveryVersion: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string id = 3; + v3 := compiler.MapValueForKey(m, "id") + if v3 != nil { + x.Id, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for id: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v4, v4) + errors = append(errors, 
compiler.NewError(context, message)) + } + } + // string version = 5; + v5 := compiler.MapValueForKey(m, "version") + if v5 != nil { + x.Version, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for version: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string revision = 6; + v6 := compiler.MapValueForKey(m, "revision") + if v6 != nil { + x.Revision, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for revision: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 7; + v7 := compiler.MapValueForKey(m, "title") + if v7 != nil { + x.Title, ok = v7.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 8; + v8 := compiler.MapValueForKey(m, "description") + if v8 != nil { + x.Description, ok = v8.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Icons icons = 9; + v9 := compiler.MapValueForKey(m, "icons") + if v9 != nil { + var err error + x.Icons, err = NewIcons(v9, compiler.NewContext("icons", context)) + if err != nil { + errors = append(errors, err) + } + } + // string documentation_link = 10; + v10 := compiler.MapValueForKey(m, "documentationLink") + if v10 != nil { + x.DocumentationLink, ok = v10.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for documentationLink: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string labels = 11; + v11 := compiler.MapValueForKey(m, "labels") + if v11 != nil { + v, ok := v11.([]interface{}) + if ok { + x.Labels = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for labels: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string protocol = 12; + v12 := compiler.MapValueForKey(m, "protocol") + if v12 != nil { + x.Protocol, ok = v12.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for protocol: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string base_url = 13; + v13 := compiler.MapValueForKey(m, "baseUrl") + if v13 != nil { + x.BaseUrl, ok = v13.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for baseUrl: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string base_path = 14; + v14 := compiler.MapValueForKey(m, "basePath") + if v14 != nil { + x.BasePath, ok = v14.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for basePath: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string root_url = 15; + v15 := compiler.MapValueForKey(m, "rootUrl") + if v15 != nil { + x.RootUrl, ok = v15.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for rootUrl: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string service_path = 16; + v16 := compiler.MapValueForKey(m, "servicePath") + if v16 != nil { + x.ServicePath, ok = v16.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for servicePath: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string 
batch_path = 17; + v17 := compiler.MapValueForKey(m, "batchPath") + if v17 != nil { + x.BatchPath, ok = v17.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for batchPath: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Parameters parameters = 18; + v18 := compiler.MapValueForKey(m, "parameters") + if v18 != nil { + var err error + x.Parameters, err = NewParameters(v18, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + } + // Auth auth = 19; + v19 := compiler.MapValueForKey(m, "auth") + if v19 != nil { + var err error + x.Auth, err = NewAuth(v19, compiler.NewContext("auth", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string features = 20; + v20 := compiler.MapValueForKey(m, "features") + if v20 != nil { + v, ok := v20.([]interface{}) + if ok { + x.Features = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for features: %+v (%T)", v20, v20) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schemas schemas = 21; + v21 := compiler.MapValueForKey(m, "schemas") + if v21 != nil { + var err error + x.Schemas, err = NewSchemas(v21, compiler.NewContext("schemas", context)) + if err != nil { + errors = append(errors, err) + } + } + // Methods methods = 22; + v22 := compiler.MapValueForKey(m, "methods") + if v22 != nil { + var err error + x.Methods, err = NewMethods(v22, compiler.NewContext("methods", context)) + if err != nil { + errors = append(errors, err) + } + } + // Resources resources = 23; + v23 := compiler.MapValueForKey(m, "resources") + if v23 != nil { + var err error + x.Resources, err = NewResources(v23, compiler.NewContext("resources", context)) + if err != nil { + errors = append(errors, err) + } + } + // string etag = 24; + v24 := compiler.MapValueForKey(m, "etag") + if v24 != nil { + x.Etag, ok = v24.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for etag: %+v (%T)", v24, v24) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string owner_domain = 25; + v25 := compiler.MapValueForKey(m, "ownerDomain") + if v25 != nil { + x.OwnerDomain, ok = v25.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for ownerDomain: %+v (%T)", v25, v25) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string owner_name = 26; + v26 := compiler.MapValueForKey(m, "ownerName") + if v26 != nil { + x.OwnerName, ok = v26.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for ownerName: %+v (%T)", v26, v26) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool version_module = 27; + v27 := compiler.MapValueForKey(m, "version_module") + if v27 != nil { + x.VersionModule, ok = v27.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for version_module: %+v (%T)", v27, v27) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string canonical_name = 28; + v28 := compiler.MapValueForKey(m, "canonicalName") + if v28 != nil { + x.CanonicalName, ok = v28.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for canonicalName: %+v (%T)", v28, v28) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool fully_encode_reserved_expansion = 29; + v29 := compiler.MapValueForKey(m, "fullyEncodeReservedExpansion") + if v29 != nil { + x.FullyEncodeReservedExpansion, ok = v29.(bool) + if !ok { + 
message := fmt.Sprintf("has unexpected value for fullyEncodeReservedExpansion: %+v (%T)", v29, v29) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string package_path = 30; + v30 := compiler.MapValueForKey(m, "packagePath") + if v30 != nil { + x.PackagePath, ok = v30.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for packagePath: %+v (%T)", v30, v30) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewIcons creates an object of type Icons if possible, returning an error if not. +func NewIcons(in interface{}, context *compiler.Context) (*Icons, error) { + errors := make([]error, 0) + x := &Icons{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"x16", "x32"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"x16", "x32"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string x16 = 1; + v1 := compiler.MapValueForKey(m, "x16") + if v1 != nil { + x.X16, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for x16: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string x32 = 2; + v2 := compiler.MapValueForKey(m, "x32") + if v2 != nil { + x.X32, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for x32: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMediaUpload creates an object of type MediaUpload if possible, returning an error if not. 
+func NewMediaUpload(in interface{}, context *compiler.Context) (*MediaUpload, error) { + errors := make([]error, 0) + x := &MediaUpload{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"accept", "maxSize", "protocols", "supportsSubscription"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string accept = 1; + v1 := compiler.MapValueForKey(m, "accept") + if v1 != nil { + v, ok := v1.([]interface{}) + if ok { + x.Accept = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for accept: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string max_size = 2; + v2 := compiler.MapValueForKey(m, "maxSize") + if v2 != nil { + x.MaxSize, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for maxSize: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Protocols protocols = 3; + v3 := compiler.MapValueForKey(m, "protocols") + if v3 != nil { + var err error + x.Protocols, err = NewProtocols(v3, compiler.NewContext("protocols", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool supports_subscription = 4; + v4 := compiler.MapValueForKey(m, "supportsSubscription") + if v4 != nil { + x.SupportsSubscription, ok = v4.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for supportsSubscription: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMethod creates an object of type Method if possible, returning an error if not. 
+func NewMethod(in interface{}, context *compiler.Context) (*Method, error) { + errors := make([]error, 0) + x := &Method{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"description", "etagRequired", "flatPath", "httpMethod", "id", "mediaUpload", "parameterOrder", "parameters", "path", "request", "response", "scopes", "supportsMediaDownload", "supportsMediaUpload", "supportsSubscription", "useMediaDownloadService"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string id = 1; + v1 := compiler.MapValueForKey(m, "id") + if v1 != nil { + x.Id, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for id: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string path = 2; + v2 := compiler.MapValueForKey(m, "path") + if v2 != nil { + x.Path, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for path: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string http_method = 3; + v3 := compiler.MapValueForKey(m, "httpMethod") + if v3 != nil { + x.HttpMethod, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for httpMethod: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Parameters parameters = 5; + v5 := compiler.MapValueForKey(m, "parameters") + if v5 != nil { + var err error + x.Parameters, err = NewParameters(v5, compiler.NewContext("parameters", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string parameter_order = 6; + v6 := compiler.MapValueForKey(m, "parameterOrder") + if v6 != nil { + v, ok := v6.([]interface{}) + if ok { + x.ParameterOrder = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for parameterOrder: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Request request = 7; + v7 := compiler.MapValueForKey(m, "request") + if v7 != nil { + var err error + x.Request, err = NewRequest(v7, compiler.NewContext("request", context)) + if err != nil { + errors = append(errors, err) + } + } + // Response response = 8; + v8 := compiler.MapValueForKey(m, "response") + if v8 != nil { + var err error + x.Response, err = NewResponse(v8, compiler.NewContext("response", context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string scopes = 9; + v9 := compiler.MapValueForKey(m, "scopes") + if v9 != nil { + v, ok := v9.([]interface{}) + if ok { + x.Scopes = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for scopes: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool supports_media_download = 10; + 
v10 := compiler.MapValueForKey(m, "supportsMediaDownload") + if v10 != nil { + x.SupportsMediaDownload, ok = v10.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for supportsMediaDownload: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool supports_media_upload = 11; + v11 := compiler.MapValueForKey(m, "supportsMediaUpload") + if v11 != nil { + x.SupportsMediaUpload, ok = v11.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for supportsMediaUpload: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool use_media_download_service = 12; + v12 := compiler.MapValueForKey(m, "useMediaDownloadService") + if v12 != nil { + x.UseMediaDownloadService, ok = v12.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for useMediaDownloadService: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // MediaUpload media_upload = 13; + v13 := compiler.MapValueForKey(m, "mediaUpload") + if v13 != nil { + var err error + x.MediaUpload, err = NewMediaUpload(v13, compiler.NewContext("mediaUpload", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool supports_subscription = 14; + v14 := compiler.MapValueForKey(m, "supportsSubscription") + if v14 != nil { + x.SupportsSubscription, ok = v14.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for supportsSubscription: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flat_path = 15; + v15 := compiler.MapValueForKey(m, "flatPath") + if v15 != nil { + x.FlatPath, ok = v15.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for flatPath: %+v (%T)", v15, v15) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool etag_required = 16; + v16 := compiler.MapValueForKey(m, "etagRequired") + if v16 != nil { + x.EtagRequired, ok = v16.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for etagRequired: %+v (%T)", v16, v16) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMethods creates an object of type Methods if possible, returning an error if not. +func NewMethods(in interface{}, context *compiler.Context) (*Methods, error) { + errors := make([]error, 0) + x := &Methods{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedMethod additional_properties = 1; + // MAP: Method + x.AdditionalProperties = make([]*NamedMethod, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedMethod{} + pair.Name = k + var err error + pair.Value, err = NewMethod(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedMethod creates an object of type NamedMethod if possible, returning an error if not. 
+func NewNamedMethod(in interface{}, context *compiler.Context) (*NamedMethod, error) { + errors := make([]error, 0) + x := &NamedMethod{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Method value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewMethod(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedParameter creates an object of type NamedParameter if possible, returning an error if not. +func NewNamedParameter(in interface{}, context *compiler.Context) (*NamedParameter, error) { + errors := make([]error, 0) + x := &NamedParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Parameter value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewParameter(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResource creates an object of type NamedResource if possible, returning an error if not. 
+func NewNamedResource(in interface{}, context *compiler.Context) (*NamedResource, error) { + errors := make([]error, 0) + x := &NamedResource{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Resource value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResource(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSchema creates an object of type NamedSchema if possible, returning an error if not. +func NewNamedSchema(in interface{}, context *compiler.Context) (*NamedSchema, error) { + errors := make([]error, 0) + x := &NamedSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSchema(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedScope creates an object of type NamedScope if possible, returning an error if not. 
+func NewNamedScope(in interface{}, context *compiler.Context) (*NamedScope, error) { + errors := make([]error, 0) + x := &NamedScope{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Scope value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewScope(v2, compiler.NewContext("value", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2 creates an object of type Oauth2 if possible, returning an error if not. +func NewOauth2(in interface{}, context *compiler.Context) (*Oauth2, error) { + errors := make([]error, 0) + x := &Oauth2{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"scopes"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // Scopes scopes = 1; + v1 := compiler.MapValueForKey(m, "scopes") + if v1 != nil { + var err error + x.Scopes, err = NewScopes(v1, compiler.NewContext("scopes", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameter creates an object of type Parameter if possible, returning an error if not. 
+func NewParameter(in interface{}, context *compiler.Context) (*Parameter, error) { + errors := make([]error, 0) + x := &Parameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "additionalProperties", "annotations", "default", "description", "enum", "enumDescriptions", "format", "id", "items", "location", "maximum", "minimum", "pattern", "properties", "repeated", "required", "type"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string id = 1; + v1 := compiler.MapValueForKey(m, "id") + if v1 != nil { + x.Id, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for id: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 2; + v2 := compiler.MapValueForKey(m, "type") + if v2 != nil { + x.Type, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string _ref = 3; + v3 := compiler.MapValueForKey(m, "$ref") + if v3 != nil { + x.XRef, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + x.Default, ok = v5.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for default: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 6; + v6 := compiler.MapValueForKey(m, "required") + if v6 != nil { + x.Required, ok = v6.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := compiler.MapValueForKey(m, "format") + if v7 != nil { + x.Format, ok = v7.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 8; + v8 := compiler.MapValueForKey(m, "pattern") + if v8 != nil { + x.Pattern, ok = v8.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string minimum = 9; + v9 := compiler.MapValueForKey(m, "minimum") + if v9 != nil { + x.Minimum, ok = v9.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + x.Maximum, ok = v10.(string) + if !ok { + message := fmt.Sprintf("has unexpected 
value for maximum: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string enum = 11; + v11 := compiler.MapValueForKey(m, "enum") + if v11 != nil { + v, ok := v11.([]interface{}) + if ok { + x.Enum = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for enum: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string enum_descriptions = 12; + v12 := compiler.MapValueForKey(m, "enumDescriptions") + if v12 != nil { + v, ok := v12.([]interface{}) + if ok { + x.EnumDescriptions = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for enumDescriptions: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool repeated = 13; + v13 := compiler.MapValueForKey(m, "repeated") + if v13 != nil { + x.Repeated, ok = v13.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for repeated: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string location = 14; + v14 := compiler.MapValueForKey(m, "location") + if v14 != nil { + x.Location, ok = v14.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for location: %+v (%T)", v14, v14) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schemas properties = 15; + v15 := compiler.MapValueForKey(m, "properties") + if v15 != nil { + var err error + x.Properties, err = NewSchemas(v15, compiler.NewContext("properties", context)) + if err != nil { + errors = append(errors, err) + } + } + // Schema additional_properties = 16; + v16 := compiler.MapValueForKey(m, "additionalProperties") + if v16 != nil { + var err error + x.AdditionalProperties, err = NewSchema(v16, compiler.NewContext("additionalProperties", context)) + if err != nil { + errors = append(errors, err) + } + } + // Schema items = 17; + v17 := compiler.MapValueForKey(m, "items") + if v17 != nil { + var err error + x.Items, err = NewSchema(v17, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // Annotations annotations = 18; + v18 := compiler.MapValueForKey(m, "annotations") + if v18 != nil { + var err error + x.Annotations, err = NewAnnotations(v18, compiler.NewContext("annotations", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameters creates an object of type Parameters if possible, returning an error if not. 
+func NewParameters(in interface{}, context *compiler.Context) (*Parameters, error) { + errors := make([]error, 0) + x := &Parameters{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedParameter additional_properties = 1; + // MAP: Parameter + x.AdditionalProperties = make([]*NamedParameter, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedParameter{} + pair.Name = k + var err error + pair.Value, err = NewParameter(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewProtocols creates an object of type Protocols if possible, returning an error if not. +func NewProtocols(in interface{}, context *compiler.Context) (*Protocols, error) { + errors := make([]error, 0) + x := &Protocols{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"resumable", "simple"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // Simple simple = 1; + v1 := compiler.MapValueForKey(m, "simple") + if v1 != nil { + var err error + x.Simple, err = NewSimple(v1, compiler.NewContext("simple", context)) + if err != nil { + errors = append(errors, err) + } + } + // Resumable resumable = 2; + v2 := compiler.MapValueForKey(m, "resumable") + if v2 != nil { + var err error + x.Resumable, err = NewResumable(v2, compiler.NewContext("resumable", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewRequest creates an object of type Request if possible, returning an error if not. 
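
The map-typed constructors (Parameters above, and the Schemas, Resources, and Scopes variants later in this file) store entries as a repeated NamedX slice rather than a Go map, which preserves the ordering of the source document. Continuing the hypothetical sketch above, with node now assumed to hold a document's "parameters" mapping:

    params, err := discovery_v1.NewParameters(node, compiler.NewContext("parameters", nil))
    if err != nil {
        log.Fatal(err)
    }
    for _, pair := range params.AdditionalProperties {
        // Each pair is a *NamedParameter carrying the key and the parsed value.
        fmt.Println(pair.Name, pair.Value.Type)
    }
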
+func NewRequest(in interface{}, context *compiler.Context) (*Request, error) { + errors := make([]error, 0) + x := &Request{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "parameterName"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string parameter_name = 2; + v2 := compiler.MapValueForKey(m, "parameterName") + if v2 != nil { + x.ParameterName, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for parameterName: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResource creates an object of type Resource if possible, returning an error if not. +func NewResource(in interface{}, context *compiler.Context) (*Resource, error) { + errors := make([]error, 0) + x := &Resource{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"methods", "resources"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // Methods methods = 1; + v1 := compiler.MapValueForKey(m, "methods") + if v1 != nil { + var err error + x.Methods, err = NewMethods(v1, compiler.NewContext("methods", context)) + if err != nil { + errors = append(errors, err) + } + } + // Resources resources = 2; + v2 := compiler.MapValueForKey(m, "resources") + if v2 != nil { + var err error + x.Resources, err = NewResources(v2, compiler.NewContext("resources", context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResources creates an object of type Resources if possible, returning an error if not. 
+func NewResources(in interface{}, context *compiler.Context) (*Resources, error) { + errors := make([]error, 0) + x := &Resources{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedResource additional_properties = 1; + // MAP: Resource + x.AdditionalProperties = make([]*NamedResource, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedResource{} + pair.Name = k + var err error + pair.Value, err = NewResource(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponse creates an object of type Response if possible, returning an error if not. +func NewResponse(in interface{}, context *compiler.Context) (*Response, error) { + errors := make([]error, 0) + x := &Response{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResumable creates an object of type Resumable if possible, returning an error if not. +func NewResumable(in interface{}, context *compiler.Context) (*Resumable, error) { + errors := make([]error, 0) + x := &Resumable{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"multipart", "path"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool multipart = 1; + v1 := compiler.MapValueForKey(m, "multipart") + if v1 != nil { + x.Multipart, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for multipart: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string path = 2; + v2 := compiler.MapValueForKey(m, "path") + if v2 != nil { + x.Path, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for path: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchema creates an object of type Schema if possible, returning an error if not. 
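
As the allowed-key checks above show, unknown keys are reported rather than silently dropped. A small invented example against NewResumable, whose only allowed keys are "multipart" and "path" (reusing the imports from the first sketch):

    in := yaml.MapSlice{
        {Key: "path", Value: "/resumable/upload"},
        {Key: "chunkSize", Value: 1024}, // invented key, not in the discovery schema
    }
    if _, err := discovery_v1.NewResumable(in, compiler.NewContext("resumable", nil)); err != nil {
        fmt.Println(err) // the error group names "chunkSize" as an invalid property
    }
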
+func NewSchema(in interface{}, context *compiler.Context) (*Schema, error) { + errors := make([]error, 0) + x := &Schema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "additionalProperties", "annotations", "default", "description", "enum", "enumDescriptions", "format", "id", "items", "location", "maximum", "minimum", "pattern", "properties", "readOnly", "repeated", "required", "type"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string id = 1; + v1 := compiler.MapValueForKey(m, "id") + if v1 != nil { + x.Id, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for id: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 2; + v2 := compiler.MapValueForKey(m, "type") + if v2 != nil { + x.Type, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = v3.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v3, v3) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string default = 4; + v4 := compiler.MapValueForKey(m, "default") + if v4 != nil { + x.Default, ok = v4.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for default: %+v (%T)", v4, v4) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 5; + v5 := compiler.MapValueForKey(m, "required") + if v5 != nil { + x.Required, ok = v5.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %+v (%T)", v5, v5) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = v6.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %+v (%T)", v6, v6) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 7; + v7 := compiler.MapValueForKey(m, "pattern") + if v7 != nil { + x.Pattern, ok = v7.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %+v (%T)", v7, v7) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + x.Minimum, ok = v8.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for minimum: %+v (%T)", v8, v8) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string maximum = 9; + v9 := compiler.MapValueForKey(m, "maximum") + if v9 != nil { + x.Maximum, ok = v9.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for maximum: %+v (%T)", v9, v9) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string enum = 10; + v10 := compiler.MapValueForKey(m, "enum") + if v10 != nil { + v, ok := v10.([]interface{}) + if ok { + x.Enum = 
compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for enum: %+v (%T)", v10, v10) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string enum_descriptions = 11; + v11 := compiler.MapValueForKey(m, "enumDescriptions") + if v11 != nil { + v, ok := v11.([]interface{}) + if ok { + x.EnumDescriptions = compiler.ConvertInterfaceArrayToStringArray(v) + } else { + message := fmt.Sprintf("has unexpected value for enumDescriptions: %+v (%T)", v11, v11) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool repeated = 12; + v12 := compiler.MapValueForKey(m, "repeated") + if v12 != nil { + x.Repeated, ok = v12.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for repeated: %+v (%T)", v12, v12) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string location = 13; + v13 := compiler.MapValueForKey(m, "location") + if v13 != nil { + x.Location, ok = v13.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for location: %+v (%T)", v13, v13) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schemas properties = 14; + v14 := compiler.MapValueForKey(m, "properties") + if v14 != nil { + var err error + x.Properties, err = NewSchemas(v14, compiler.NewContext("properties", context)) + if err != nil { + errors = append(errors, err) + } + } + // Schema additional_properties = 15; + v15 := compiler.MapValueForKey(m, "additionalProperties") + if v15 != nil { + var err error + x.AdditionalProperties, err = NewSchema(v15, compiler.NewContext("additionalProperties", context)) + if err != nil { + errors = append(errors, err) + } + } + // Schema items = 16; + v16 := compiler.MapValueForKey(m, "items") + if v16 != nil { + var err error + x.Items, err = NewSchema(v16, compiler.NewContext("items", context)) + if err != nil { + errors = append(errors, err) + } + } + // string _ref = 17; + v17 := compiler.MapValueForKey(m, "$ref") + if v17 != nil { + x.XRef, ok = v17.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %+v (%T)", v17, v17) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Annotations annotations = 18; + v18 := compiler.MapValueForKey(m, "annotations") + if v18 != nil { + var err error + x.Annotations, err = NewAnnotations(v18, compiler.NewContext("annotations", context)) + if err != nil { + errors = append(errors, err) + } + } + // bool read_only = 19; + v19 := compiler.MapValueForKey(m, "readOnly") + if v19 != nil { + x.ReadOnly, ok = v19.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %+v (%T)", v19, v19) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemas creates an object of type Schemas if possible, returning an error if not. 
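
Because a Schema's properties, additionalProperties, and items fields recurse back into NewSchemas and NewSchema, a nested object or array schema parses in one call. A hypothetical fragment, again assuming the imports from the first sketch:

    rawSchema := []byte(`
    id: UserList
    type: object
    properties:
      users:
        type: array
        items:
          $ref: User
    `)
    var schemaNode yaml.MapSlice
    if err := yaml.Unmarshal(rawSchema, &schemaNode); err != nil {
        log.Fatal(err)
    }
    s, err := discovery_v1.NewSchema(schemaNode, compiler.NewContext("UserList", nil))
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(s.Id, len(s.Properties.AdditionalProperties)) // UserList 1
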
+func NewSchemas(in interface{}, context *compiler.Context) (*Schemas, error) { + errors := make([]error, 0) + x := &Schemas{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewScope creates an object of type Scope if possible, returning an error if not. +func NewScope(in interface{}, context *compiler.Context) (*Scope, error) { + errors := make([]error, 0) + x := &Scope{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"description"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = v1.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewScopes creates an object of type Scopes if possible, returning an error if not. +func NewScopes(in interface{}, context *compiler.Context) (*Scopes, error) { + errors := make([]error, 0) + x := &Scopes{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedScope additional_properties = 1; + // MAP: Scope + x.AdditionalProperties = make([]*NamedScope, 0) + for _, item := range m { + k, ok := compiler.StringValue(item.Key) + if ok { + v := item.Value + pair := &NamedScope{} + pair.Name = k + var err error + pair.Value, err = NewScope(v, compiler.NewContext(k, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSimple creates an object of type Simple if possible, returning an error if not. 
+func NewSimple(in interface{}, context *compiler.Context) (*Simple, error) { + errors := make([]error, 0) + x := &Simple{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"multipart", "path"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool multipart = 1; + v1 := compiler.MapValueForKey(m, "multipart") + if v1 != nil { + x.Multipart, ok = v1.(bool) + if !ok { + message := fmt.Sprintf("has unexpected value for multipart: %+v (%T)", v1, v1) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string path = 2; + v2 := compiler.MapValueForKey(m, "path") + if v2 != nil { + x.Path, ok = v2.(string) + if !ok { + message := fmt.Sprintf("has unexpected value for path: %+v (%T)", v2, v2) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStringArray creates an object of type StringArray if possible, returning an error if not. +func NewStringArray(in interface{}, context *compiler.Context) (*StringArray, error) { + errors := make([]error, 0) + x := &StringArray{} + a, ok := in.([]interface{}) + if !ok { + message := fmt.Sprintf("has unexpected value for StringArray: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.Value = make([]string, 0) + for _, s := range a { + x.Value = append(x.Value, s.(string)) + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Annotations objects. +func (m *Annotations) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Any objects. +func (m *Any) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Auth objects. +func (m *Auth) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Oauth2 != nil { + _, err := m.Oauth2.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Document objects. 
+func (m *Document) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Icons != nil { + _, err := m.Icons.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Auth != nil { + _, err := m.Auth.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Schemas != nil { + _, err := m.Schemas.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Methods != nil { + _, err := m.Methods.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Resources != nil { + _, err := m.Resources.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Icons objects. +func (m *Icons) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside MediaUpload objects. +func (m *MediaUpload) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Protocols != nil { + _, err := m.Protocols.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Method objects. +func (m *Method) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Request != nil { + _, err := m.Request.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Response != nil { + _, err := m.Response.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.MediaUpload != nil { + _, err := m.MediaUpload.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Methods objects. +func (m *Methods) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedMethod objects. +func (m *NamedMethod) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedParameter objects. +func (m *NamedParameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResource objects. 
+func (m *NamedResource) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSchema objects. +func (m *NamedSchema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedScope objects. +func (m *NamedScope) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2 objects. +func (m *Oauth2) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameter objects. +func (m *Parameter) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewParameter(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Annotations != nil { + _, err := m.Annotations.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameters objects. +func (m *Parameters) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Protocols objects. +func (m *Protocols) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Simple != nil { + _, err := m.Simple.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Resumable != nil { + _, err := m.Resumable.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Request objects. 
+func (m *Request) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewRequest(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Resource objects. +func (m *Resource) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Methods != nil { + _, err := m.Methods.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Resources != nil { + _, err := m.Resources.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Resources objects. +func (m *Resources) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Response objects. +func (m *Response) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + return info, nil + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Resumable objects. +func (m *Resumable) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schema objects. +func (m *Schema) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewSchema(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Annotations != nil { + _, err := m.Annotations.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schemas objects. +func (m *Schemas) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Scope objects. 
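
The ResolveReferences methods above share one pattern: walk the child messages, and wherever a $ref (XRef) is set, load the target with compiler.ReadInfoForRef relative to root and, for most types, rebuild the node from the loaded info and splice the replacement in place. A hypothetical call, continuing the schema sketch above ("my-api.json" is an invented file name standing in for the source discovery document):

    if _, err := s.ResolveReferences("my-api.json"); err != nil {
        log.Println(err)
    }
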
+func (m *Scope) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Scopes objects. +func (m *Scopes) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Simple objects. +func (m *Simple) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside StringArray objects. +func (m *StringArray) ResolveReferences(root string) (interface{}, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ToRawInfo returns a description of Annotations suitable for JSON or YAML export. +func (m *Annotations) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Required) != 0 { + info = append(info, yaml.MapItem{"required", m.Required}) + } + return info +} + +// ToRawInfo returns a description of Any suitable for JSON or YAML export. +func (m *Any) ToRawInfo() interface{} { + var err error + var info1 []yaml.MapSlice + err = yaml.Unmarshal([]byte(m.Yaml), &info1) + if err == nil { + return info1 + } + var info2 yaml.MapSlice + err = yaml.Unmarshal([]byte(m.Yaml), &info2) + if err == nil { + return info2 + } + var info3 interface{} + err = yaml.Unmarshal([]byte(m.Yaml), &info3) + if err == nil { + return info3 + } + return nil +} + +// ToRawInfo returns a description of Auth suitable for JSON or YAML export. +func (m *Auth) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Oauth2 != nil { + info = append(info, yaml.MapItem{"oauth2", m.Oauth2.ToRawInfo()}) + } + // &{Name:oauth2 Type:Oauth2 StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Document suitable for JSON or YAML export. 
+func (m *Document) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Kind != "" { + info = append(info, yaml.MapItem{"kind", m.Kind}) + } + if m.DiscoveryVersion != "" { + info = append(info, yaml.MapItem{"discoveryVersion", m.DiscoveryVersion}) + } + if m.Id != "" { + info = append(info, yaml.MapItem{"id", m.Id}) + } + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + if m.Version != "" { + info = append(info, yaml.MapItem{"version", m.Version}) + } + if m.Revision != "" { + info = append(info, yaml.MapItem{"revision", m.Revision}) + } + if m.Title != "" { + info = append(info, yaml.MapItem{"title", m.Title}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Icons != nil { + info = append(info, yaml.MapItem{"icons", m.Icons.ToRawInfo()}) + } + // &{Name:icons Type:Icons StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.DocumentationLink != "" { + info = append(info, yaml.MapItem{"documentationLink", m.DocumentationLink}) + } + if len(m.Labels) != 0 { + info = append(info, yaml.MapItem{"labels", m.Labels}) + } + if m.Protocol != "" { + info = append(info, yaml.MapItem{"protocol", m.Protocol}) + } + if m.BaseUrl != "" { + info = append(info, yaml.MapItem{"baseUrl", m.BaseUrl}) + } + if m.BasePath != "" { + info = append(info, yaml.MapItem{"basePath", m.BasePath}) + } + if m.RootUrl != "" { + info = append(info, yaml.MapItem{"rootUrl", m.RootUrl}) + } + if m.ServicePath != "" { + info = append(info, yaml.MapItem{"servicePath", m.ServicePath}) + } + if m.BatchPath != "" { + info = append(info, yaml.MapItem{"batchPath", m.BatchPath}) + } + if m.Parameters != nil { + info = append(info, yaml.MapItem{"parameters", m.Parameters.ToRawInfo()}) + } + // &{Name:parameters Type:Parameters StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Auth != nil { + info = append(info, yaml.MapItem{"auth", m.Auth.ToRawInfo()}) + } + // &{Name:auth Type:Auth StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Features) != 0 { + info = append(info, yaml.MapItem{"features", m.Features}) + } + if m.Schemas != nil { + info = append(info, yaml.MapItem{"schemas", m.Schemas.ToRawInfo()}) + } + // &{Name:schemas Type:Schemas StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Methods != nil { + info = append(info, yaml.MapItem{"methods", m.Methods.ToRawInfo()}) + } + // &{Name:methods Type:Methods StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Resources != nil { + info = append(info, yaml.MapItem{"resources", m.Resources.ToRawInfo()}) + } + // &{Name:resources Type:Resources StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Etag != "" { + info = append(info, yaml.MapItem{"etag", m.Etag}) + } + if m.OwnerDomain != "" { + info = append(info, yaml.MapItem{"ownerDomain", m.OwnerDomain}) + } + if m.OwnerName != "" { + info = append(info, yaml.MapItem{"ownerName", m.OwnerName}) + } + if m.VersionModule != false { + info = append(info, yaml.MapItem{"version_module", m.VersionModule}) + } + if m.CanonicalName != "" { + info = append(info, yaml.MapItem{"canonicalName", m.CanonicalName}) + } + if m.FullyEncodeReservedExpansion != false { + info = append(info, yaml.MapItem{"fullyEncodeReservedExpansion", m.FullyEncodeReservedExpansion}) + } + if m.PackagePath != "" { + info = append(info, 
yaml.MapItem{"packagePath", m.PackagePath}) + } + return info +} + +// ToRawInfo returns a description of Icons suitable for JSON or YAML export. +func (m *Icons) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.X16 != "" { + info = append(info, yaml.MapItem{"x16", m.X16}) + } + if m.X32 != "" { + info = append(info, yaml.MapItem{"x32", m.X32}) + } + return info +} + +// ToRawInfo returns a description of MediaUpload suitable for JSON or YAML export. +func (m *MediaUpload) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if len(m.Accept) != 0 { + info = append(info, yaml.MapItem{"accept", m.Accept}) + } + if m.MaxSize != "" { + info = append(info, yaml.MapItem{"maxSize", m.MaxSize}) + } + if m.Protocols != nil { + info = append(info, yaml.MapItem{"protocols", m.Protocols.ToRawInfo()}) + } + // &{Name:protocols Type:Protocols StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SupportsSubscription != false { + info = append(info, yaml.MapItem{"supportsSubscription", m.SupportsSubscription}) + } + return info +} + +// ToRawInfo returns a description of Method suitable for JSON or YAML export. +func (m *Method) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Id != "" { + info = append(info, yaml.MapItem{"id", m.Id}) + } + if m.Path != "" { + info = append(info, yaml.MapItem{"path", m.Path}) + } + if m.HttpMethod != "" { + info = append(info, yaml.MapItem{"httpMethod", m.HttpMethod}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Parameters != nil { + info = append(info, yaml.MapItem{"parameters", m.Parameters.ToRawInfo()}) + } + // &{Name:parameters Type:Parameters StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.ParameterOrder) != 0 { + info = append(info, yaml.MapItem{"parameterOrder", m.ParameterOrder}) + } + if m.Request != nil { + info = append(info, yaml.MapItem{"request", m.Request.ToRawInfo()}) + } + // &{Name:request Type:Request StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Response != nil { + info = append(info, yaml.MapItem{"response", m.Response.ToRawInfo()}) + } + // &{Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if len(m.Scopes) != 0 { + info = append(info, yaml.MapItem{"scopes", m.Scopes}) + } + if m.SupportsMediaDownload != false { + info = append(info, yaml.MapItem{"supportsMediaDownload", m.SupportsMediaDownload}) + } + if m.SupportsMediaUpload != false { + info = append(info, yaml.MapItem{"supportsMediaUpload", m.SupportsMediaUpload}) + } + if m.UseMediaDownloadService != false { + info = append(info, yaml.MapItem{"useMediaDownloadService", m.UseMediaDownloadService}) + } + if m.MediaUpload != nil { + info = append(info, yaml.MapItem{"mediaUpload", m.MediaUpload.ToRawInfo()}) + } + // &{Name:mediaUpload Type:MediaUpload StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.SupportsSubscription != false { + info = append(info, yaml.MapItem{"supportsSubscription", m.SupportsSubscription}) + } + if m.FlatPath != "" { + info = append(info, yaml.MapItem{"flatPath", m.FlatPath}) + } + if m.EtagRequired != false { + info = append(info, yaml.MapItem{"etagRequired", m.EtagRequired}) + } + return info +} + +// ToRawInfo returns a description of Methods suitable for JSON or YAML export. 
+func (m *Methods) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedMethod StringEnumValues:[] MapType:Method Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of NamedMethod suitable for JSON or YAML export. +func (m *NamedMethod) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Method StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedParameter suitable for JSON or YAML export. +func (m *NamedParameter) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResource suitable for JSON or YAML export. +func (m *NamedResource) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Resource StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSchema suitable for JSON or YAML export. +func (m *NamedSchema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedScope suitable for JSON or YAML export. +func (m *NamedScope) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Name != "" { + info = append(info, yaml.MapItem{"name", m.Name}) + } + // &{Name:value Type:Scope StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of Oauth2 suitable for JSON or YAML export. +func (m *Oauth2) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Scopes != nil { + info = append(info, yaml.MapItem{"scopes", m.Scopes.ToRawInfo()}) + } + // &{Name:scopes Type:Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. 
+func (m *Parameter) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Id != "" { + info = append(info, yaml.MapItem{"id", m.Id}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Default != "" { + info = append(info, yaml.MapItem{"default", m.Default}) + } + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.Minimum != "" { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.Maximum != "" { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if len(m.Enum) != 0 { + info = append(info, yaml.MapItem{"enum", m.Enum}) + } + if len(m.EnumDescriptions) != 0 { + info = append(info, yaml.MapItem{"enumDescriptions", m.EnumDescriptions}) + } + if m.Repeated != false { + info = append(info, yaml.MapItem{"repeated", m.Repeated}) + } + if m.Location != "" { + info = append(info, yaml.MapItem{"location", m.Location}) + } + if m.Properties != nil { + info = append(info, yaml.MapItem{"properties", m.Properties.ToRawInfo()}) + } + // &{Name:properties Type:Schemas StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.AdditionalProperties != nil { + info = append(info, yaml.MapItem{"additionalProperties", m.AdditionalProperties.ToRawInfo()}) + } + // &{Name:additionalProperties Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Annotations != nil { + info = append(info, yaml.MapItem{"annotations", m.Annotations.ToRawInfo()}) + } + // &{Name:annotations Type:Annotations StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Parameters suitable for JSON or YAML export. +func (m *Parameters) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedParameter StringEnumValues:[] MapType:Parameter Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Protocols suitable for JSON or YAML export. +func (m *Protocols) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Simple != nil { + info = append(info, yaml.MapItem{"simple", m.Simple.ToRawInfo()}) + } + // &{Name:simple Type:Simple StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Resumable != nil { + info = append(info, yaml.MapItem{"resumable", m.Resumable.ToRawInfo()}) + } + // &{Name:resumable Type:Resumable StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Request suitable for JSON or YAML export. 
+func (m *Request) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.ParameterName != "" { + info = append(info, yaml.MapItem{"parameterName", m.ParameterName}) + } + return info +} + +// ToRawInfo returns a description of Resource suitable for JSON or YAML export. +func (m *Resource) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Methods != nil { + info = append(info, yaml.MapItem{"methods", m.Methods.ToRawInfo()}) + } + // &{Name:methods Type:Methods StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Resources != nil { + info = append(info, yaml.MapItem{"resources", m.Resources.ToRawInfo()}) + } + // &{Name:resources Type:Resources StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + return info +} + +// ToRawInfo returns a description of Resources suitable for JSON or YAML export. +func (m *Resources) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedResource StringEnumValues:[] MapType:Resource Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Response suitable for JSON or YAML export. +func (m *Response) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + return info +} + +// ToRawInfo returns a description of Resumable suitable for JSON or YAML export. +func (m *Resumable) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Multipart != false { + info = append(info, yaml.MapItem{"multipart", m.Multipart}) + } + if m.Path != "" { + info = append(info, yaml.MapItem{"path", m.Path}) + } + return info +} + +// ToRawInfo returns a description of Schema suitable for JSON or YAML export. 
+func (m *Schema) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Id != "" { + info = append(info, yaml.MapItem{"id", m.Id}) + } + if m.Type != "" { + info = append(info, yaml.MapItem{"type", m.Type}) + } + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + if m.Default != "" { + info = append(info, yaml.MapItem{"default", m.Default}) + } + if m.Required != false { + info = append(info, yaml.MapItem{"required", m.Required}) + } + if m.Format != "" { + info = append(info, yaml.MapItem{"format", m.Format}) + } + if m.Pattern != "" { + info = append(info, yaml.MapItem{"pattern", m.Pattern}) + } + if m.Minimum != "" { + info = append(info, yaml.MapItem{"minimum", m.Minimum}) + } + if m.Maximum != "" { + info = append(info, yaml.MapItem{"maximum", m.Maximum}) + } + if len(m.Enum) != 0 { + info = append(info, yaml.MapItem{"enum", m.Enum}) + } + if len(m.EnumDescriptions) != 0 { + info = append(info, yaml.MapItem{"enumDescriptions", m.EnumDescriptions}) + } + if m.Repeated != false { + info = append(info, yaml.MapItem{"repeated", m.Repeated}) + } + if m.Location != "" { + info = append(info, yaml.MapItem{"location", m.Location}) + } + if m.Properties != nil { + info = append(info, yaml.MapItem{"properties", m.Properties.ToRawInfo()}) + } + // &{Name:properties Type:Schemas StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.AdditionalProperties != nil { + info = append(info, yaml.MapItem{"additionalProperties", m.AdditionalProperties.ToRawInfo()}) + } + // &{Name:additionalProperties Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.Items != nil { + info = append(info, yaml.MapItem{"items", m.Items.ToRawInfo()}) + } + // &{Name:items Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.XRef != "" { + info = append(info, yaml.MapItem{"$ref", m.XRef}) + } + if m.Annotations != nil { + info = append(info, yaml.MapItem{"annotations", m.Annotations.ToRawInfo()}) + } + // &{Name:annotations Type:Annotations StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if m.ReadOnly != false { + info = append(info, yaml.MapItem{"readOnly", m.ReadOnly}) + } + return info +} + +// ToRawInfo returns a description of Schemas suitable for JSON or YAML export. +func (m *Schemas) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedSchema StringEnumValues:[] MapType:Schema Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Scope suitable for JSON or YAML export. +func (m *Scope) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Description != "" { + info = append(info, yaml.MapItem{"description", m.Description}) + } + return info +} + +// ToRawInfo returns a description of Scopes suitable for JSON or YAML export. 
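
ToRawInfo, as the implementations above show, emits only fields holding non-zero values and returns an ordered yaml.MapSlice, so marshalling the result reproduces a compact discovery-style fragment. Continuing the same hypothetical sketch:

    out, err := yaml.Marshal(s.ToRawInfo())
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%s", out) // e.g. "id: UserList\ntype: object\nproperties: ..."
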
+func (m *Scopes) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()}) + } + } + // &{Name:additionalProperties Type:NamedScope StringEnumValues:[] MapType:Scope Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Simple suitable for JSON or YAML export. +func (m *Simple) ToRawInfo() interface{} { + info := yaml.MapSlice{} + if m.Multipart != false { + info = append(info, yaml.MapItem{"multipart", m.Multipart}) + } + if m.Path != "" { + info = append(info, yaml.MapItem{"path", m.Path}) + } + return info +} + +// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. +func (m *StringArray) ToRawInfo() interface{} { + return m.Value +} + +var () diff --git a/vendor/github.com/googleapis/gnostic/discovery/discovery.json b/vendor/github.com/googleapis/gnostic/discovery/discovery.json new file mode 100644 index 000000000..1ae3ceb96 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/discovery/discovery.json @@ -0,0 +1,451 @@ +{ + "title": "A JSON Schema for the Google API Discovery Format.", + "id": "http://openapis.org/v3/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "description": "This is the root document object of the Discovery Format.", + "required": [ + "kind", + "discoveryVersion" + ], + "additionalProperties": false, + "properties": { + "kind": { + "type": "string" + }, + "discoveryVersion": { + "type": "string" + }, + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "revision": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "icons": { + "$ref": "#/definitions/icons" + }, + "documentationLink": { + "type": "string" + }, + "labels": { + "type": "array", + "items": { + "type": "string" + } + }, + "protocol": { + "type": "string" + }, + "baseUrl": { + "type": "string" + }, + "basePath": { + "type": "string" + }, + "rootUrl": { + "type": "string" + }, + "servicePath": { + "type": "string" + }, + "batchPath": { + "type": "string" + }, + "parameters": { + "$ref": "#/definitions/parameters" + }, + "auth": { + "$ref": "#/definitions/auth" + }, + "features": { + "type": "array", + "items": { + "type": "string" + } + }, + "schemas": { + "$ref": "#/definitions/schemas" + }, + "methods": { + "$ref": "#/definitions/methods" + }, + "resources": { + "$ref": "#/definitions/resources" + }, + "etag": { + "type": "string" + }, + "ownerDomain": { + "type": "string" + }, + "ownerName": { + "type": "string" + }, + "version_module": { + "type": "boolean" + }, + "canonicalName": { + "type": "string" + }, + "fullyEncodeReservedExpansion": { + "type": "boolean" + }, + "packagePath": { + "type": "string" + } + }, + "definitions": { + "icons": { + "type": "object", + "description": "Icons that represent the API.", + "required": [ + "x16", + "x32" + ], + "additionalProperties": false, + "properties": { + "x16": { + "type": "string" + }, + "x32": { + "type": "string" + } + } + }, + "parameters": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + } + }, + "parameter": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "$ref": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": { + 
"type": "string" + }, + "required": { + "type": "boolean" + }, + "format": { + "type": "string" + }, + "pattern": { + "type": "string" + }, + "minimum": { + "type": "string" + }, + "maximum": { + "type": "string" + }, + "enum": { + "type": "array", + "items": { + "type": "string" + } + }, + "enumDescriptions": { + "type": "array", + "items": { + "type": "string" + } + }, + "repeated": { + "type": "boolean" + }, + "location": { + "type": "string" + }, + "properties": { + "$ref": "#/definitions/schemas" + }, + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "items": { + "$ref": "#/definitions/schema" + }, + "annotations": { + "$ref": "#/definitions/annotations" + } + } + }, + "auth": { + "type": "object", + "properties": { + "oauth2": { + "type": "object", + "properties": { + "scopes": { + "$ref": "#/definitions/scopes" + } + } + } + } + }, + "schemas": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + } + }, + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": { + "type": "string" + }, + "required": { + "type": "boolean" + }, + "format": { + "type": "string" + }, + "pattern": { + "type": "string" + }, + "minimum": { + "type": "string" + }, + "maximum": { + "type": "string" + }, + "enum": { + "type": "array", + "items": { + "type": "string" + } + }, + "enumDescriptions": { + "type": "array", + "items": { + "type": "string" + } + }, + "repeated": { + "type": "boolean" + }, + "location": { + "type": "string" + }, + "properties": { + "$ref": "#/definitions/schemas" + }, + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "items": { + "$ref": "#/definitions/schema" + }, + "$ref": { + "type": "string" + }, + "annotations": { + "$ref": "#/definitions/annotations" + }, + "readOnly": { + "type": "boolean" + } + } + }, + "methods": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/method" + } + }, + "method": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "path": { + "type": "string" + }, + "httpMethod": { + "type": "string" + }, + "description": { + "type": "string" + }, + "parameters": { + "$ref": "#/definitions/parameters" + }, + "parameterOrder": { + "type": "array", + "items": { + "type": "string" + } + }, + "request": { + "type": "object", + "properties": { + "$ref": { + "type": "string" + }, + "parameterName": { + "type": "string" + } + } + }, + "response": { + "type": "object", + "properties": { + "$ref": { + "type": "string" + } + } + }, + "scopes": { + "type": "array", + "items": { + "type": "string" + } + }, + "supportsMediaDownload": { + "type": "boolean" + }, + "supportsMediaUpload": { + "type": "boolean" + }, + "useMediaDownloadService": { + "type": "boolean" + }, + "mediaUpload": { + "$ref": "#/definitions/mediaUpload" + }, + "supportsSubscription": { + "type": "boolean" + }, + "flatPath": { + "type": "string" + }, + "etagRequired": { + "type": "boolean" + } + } + }, + "resources": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/resource" + } + }, + "resource": { + "type": "object", + "properties": { + "methods": { + "$ref": "#/definitions/methods" + }, + "resources": { + "$ref": "#/definitions/resources" + } + } + }, + "annotations": { + "type": "object", + "properties": { + "required": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "scopes": { + "type": "object", + "additionalProperties": 
{ + "$ref": "#/definitions/scope" + } + }, + "scope": { + "type": "object", + "properties": { + "description": { + "type": "string" + } + } + }, + "mediaUpload": { + "type": "object", + "properties": { + "accept": { + "type": "array", + "items": { + "type": "string" + } + }, + "maxSize": { + "type": "string" + }, + "protocols": { + "type": "object", + "properties": { + "simple": { + "type": "object", + "properties": { + "multipart": { + "type": "boolean" + }, + "path": { + "type": "string" + } + } + }, + "resumable": { + "type": "object", + "properties": { + "multipart": { + "type": "boolean" + }, + "path": { + "type": "string" + } + } + } + } + }, + "supportsSubscription": { + "type": "boolean" + } + } + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/discovery/discovery.pb.go b/vendor/github.com/googleapis/gnostic/discovery/discovery.pb.go new file mode 100644 index 000000000..bbfe676ca --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/discovery/discovery.pb.go @@ -0,0 +1,1416 @@ +// Code generated by protoc-gen-go. +// source: discovery/discovery.proto +// DO NOT EDIT! + +/* +Package discovery_v1 is a generated protocol buffer package. + +It is generated from these files: + discovery/discovery.proto + +It has these top-level messages: + Annotations + Any + Auth + Document + Icons + MediaUpload + Method + Methods + NamedMethod + NamedParameter + NamedResource + NamedSchema + NamedScope + Oauth2 + Parameter + Parameters + Protocols + Request + Resource + Resources + Response + Resumable + Schema + Schemas + Scope + Scopes + Simple + StringArray +*/ +package discovery_v1 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Annotations struct { + Required []string `protobuf:"bytes,1,rep,name=required" json:"required,omitempty"` +} + +func (m *Annotations) Reset() { *m = Annotations{} } +func (m *Annotations) String() string { return proto.CompactTextString(m) } +func (*Annotations) ProtoMessage() {} +func (*Annotations) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Annotations) GetRequired() []string { + if m != nil { + return m.Required + } + return nil +} + +type Any struct { + Value *google_protobuf.Any `protobuf:"bytes,1,opt,name=value" json:"value,omitempty"` + Yaml string `protobuf:"bytes,2,opt,name=yaml" json:"yaml,omitempty"` +} + +func (m *Any) Reset() { *m = Any{} } +func (m *Any) String() string { return proto.CompactTextString(m) } +func (*Any) ProtoMessage() {} +func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Any) GetValue() *google_protobuf.Any { + if m != nil { + return m.Value + } + return nil +} + +func (m *Any) GetYaml() string { + if m != nil { + return m.Yaml + } + return "" +} + +type Auth struct { + Oauth2 *Oauth2 `protobuf:"bytes,1,opt,name=oauth2" json:"oauth2,omitempty"` +} + +func (m *Auth) Reset() { *m = Auth{} } +func (m *Auth) String() string { return proto.CompactTextString(m) } +func (*Auth) ProtoMessage() {} +func (*Auth) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *Auth) GetOauth2() *Oauth2 { + if m != nil { + return m.Oauth2 + } + return nil +} + +type Document struct { + Kind string `protobuf:"bytes,1,opt,name=kind" json:"kind,omitempty"` + DiscoveryVersion string `protobuf:"bytes,2,opt,name=discovery_version,json=discoveryVersion" json:"discovery_version,omitempty"` + Id string `protobuf:"bytes,3,opt,name=id" json:"id,omitempty"` + Name string `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"` + Version string `protobuf:"bytes,5,opt,name=version" json:"version,omitempty"` + Revision string `protobuf:"bytes,6,opt,name=revision" json:"revision,omitempty"` + Title string `protobuf:"bytes,7,opt,name=title" json:"title,omitempty"` + Description string `protobuf:"bytes,8,opt,name=description" json:"description,omitempty"` + Icons *Icons `protobuf:"bytes,9,opt,name=icons" json:"icons,omitempty"` + DocumentationLink string `protobuf:"bytes,10,opt,name=documentation_link,json=documentationLink" json:"documentation_link,omitempty"` + Labels []string `protobuf:"bytes,11,rep,name=labels" json:"labels,omitempty"` + Protocol string `protobuf:"bytes,12,opt,name=protocol" json:"protocol,omitempty"` + BaseUrl string `protobuf:"bytes,13,opt,name=base_url,json=baseUrl" json:"base_url,omitempty"` + BasePath string `protobuf:"bytes,14,opt,name=base_path,json=basePath" json:"base_path,omitempty"` + RootUrl string `protobuf:"bytes,15,opt,name=root_url,json=rootUrl" json:"root_url,omitempty"` + ServicePath string `protobuf:"bytes,16,opt,name=service_path,json=servicePath" json:"service_path,omitempty"` + BatchPath string `protobuf:"bytes,17,opt,name=batch_path,json=batchPath" json:"batch_path,omitempty"` + Parameters *Parameters `protobuf:"bytes,18,opt,name=parameters" json:"parameters,omitempty"` + Auth *Auth `protobuf:"bytes,19,opt,name=auth" json:"auth,omitempty"` + Features []string `protobuf:"bytes,20,rep,name=features" json:"features,omitempty"` + Schemas *Schemas `protobuf:"bytes,21,opt,name=schemas" json:"schemas,omitempty"` + Methods *Methods `protobuf:"bytes,22,opt,name=methods" 
json:"methods,omitempty"` + Resources *Resources `protobuf:"bytes,23,opt,name=resources" json:"resources,omitempty"` + Etag string `protobuf:"bytes,24,opt,name=etag" json:"etag,omitempty"` + OwnerDomain string `protobuf:"bytes,25,opt,name=owner_domain,json=ownerDomain" json:"owner_domain,omitempty"` + OwnerName string `protobuf:"bytes,26,opt,name=owner_name,json=ownerName" json:"owner_name,omitempty"` + VersionModule bool `protobuf:"varint,27,opt,name=version_module,json=versionModule" json:"version_module,omitempty"` + CanonicalName string `protobuf:"bytes,28,opt,name=canonical_name,json=canonicalName" json:"canonical_name,omitempty"` + FullyEncodeReservedExpansion bool `protobuf:"varint,29,opt,name=fully_encode_reserved_expansion,json=fullyEncodeReservedExpansion" json:"fully_encode_reserved_expansion,omitempty"` + PackagePath string `protobuf:"bytes,30,opt,name=package_path,json=packagePath" json:"package_path,omitempty"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *Document) GetKind() string { + if m != nil { + return m.Kind + } + return "" +} + +func (m *Document) GetDiscoveryVersion() string { + if m != nil { + return m.DiscoveryVersion + } + return "" +} + +func (m *Document) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Document) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Document) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Document) GetRevision() string { + if m != nil { + return m.Revision + } + return "" +} + +func (m *Document) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Document) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Document) GetIcons() *Icons { + if m != nil { + return m.Icons + } + return nil +} + +func (m *Document) GetDocumentationLink() string { + if m != nil { + return m.DocumentationLink + } + return "" +} + +func (m *Document) GetLabels() []string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Document) GetProtocol() string { + if m != nil { + return m.Protocol + } + return "" +} + +func (m *Document) GetBaseUrl() string { + if m != nil { + return m.BaseUrl + } + return "" +} + +func (m *Document) GetBasePath() string { + if m != nil { + return m.BasePath + } + return "" +} + +func (m *Document) GetRootUrl() string { + if m != nil { + return m.RootUrl + } + return "" +} + +func (m *Document) GetServicePath() string { + if m != nil { + return m.ServicePath + } + return "" +} + +func (m *Document) GetBatchPath() string { + if m != nil { + return m.BatchPath + } + return "" +} + +func (m *Document) GetParameters() *Parameters { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Document) GetAuth() *Auth { + if m != nil { + return m.Auth + } + return nil +} + +func (m *Document) GetFeatures() []string { + if m != nil { + return m.Features + } + return nil +} + +func (m *Document) GetSchemas() *Schemas { + if m != nil { + return m.Schemas + } + return nil +} + +func (m *Document) GetMethods() *Methods { + if m != nil { + return m.Methods + } + return nil +} + +func (m *Document) GetResources() *Resources { + if m != nil { + return m.Resources + } + return nil +} + +func (m *Document) GetEtag() string { + if m != 
nil { + return m.Etag + } + return "" +} + +func (m *Document) GetOwnerDomain() string { + if m != nil { + return m.OwnerDomain + } + return "" +} + +func (m *Document) GetOwnerName() string { + if m != nil { + return m.OwnerName + } + return "" +} + +func (m *Document) GetVersionModule() bool { + if m != nil { + return m.VersionModule + } + return false +} + +func (m *Document) GetCanonicalName() string { + if m != nil { + return m.CanonicalName + } + return "" +} + +func (m *Document) GetFullyEncodeReservedExpansion() bool { + if m != nil { + return m.FullyEncodeReservedExpansion + } + return false +} + +func (m *Document) GetPackagePath() string { + if m != nil { + return m.PackagePath + } + return "" +} + +// Icons that represent the API. +type Icons struct { + X16 string `protobuf:"bytes,1,opt,name=x16" json:"x16,omitempty"` + X32 string `protobuf:"bytes,2,opt,name=x32" json:"x32,omitempty"` +} + +func (m *Icons) Reset() { *m = Icons{} } +func (m *Icons) String() string { return proto.CompactTextString(m) } +func (*Icons) ProtoMessage() {} +func (*Icons) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *Icons) GetX16() string { + if m != nil { + return m.X16 + } + return "" +} + +func (m *Icons) GetX32() string { + if m != nil { + return m.X32 + } + return "" +} + +type MediaUpload struct { + Accept []string `protobuf:"bytes,1,rep,name=accept" json:"accept,omitempty"` + MaxSize string `protobuf:"bytes,2,opt,name=max_size,json=maxSize" json:"max_size,omitempty"` + Protocols *Protocols `protobuf:"bytes,3,opt,name=protocols" json:"protocols,omitempty"` + SupportsSubscription bool `protobuf:"varint,4,opt,name=supports_subscription,json=supportsSubscription" json:"supports_subscription,omitempty"` +} + +func (m *MediaUpload) Reset() { *m = MediaUpload{} } +func (m *MediaUpload) String() string { return proto.CompactTextString(m) } +func (*MediaUpload) ProtoMessage() {} +func (*MediaUpload) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *MediaUpload) GetAccept() []string { + if m != nil { + return m.Accept + } + return nil +} + +func (m *MediaUpload) GetMaxSize() string { + if m != nil { + return m.MaxSize + } + return "" +} + +func (m *MediaUpload) GetProtocols() *Protocols { + if m != nil { + return m.Protocols + } + return nil +} + +func (m *MediaUpload) GetSupportsSubscription() bool { + if m != nil { + return m.SupportsSubscription + } + return false +} + +type Method struct { + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` + HttpMethod string `protobuf:"bytes,3,opt,name=http_method,json=httpMethod" json:"http_method,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` + Parameters *Parameters `protobuf:"bytes,5,opt,name=parameters" json:"parameters,omitempty"` + ParameterOrder []string `protobuf:"bytes,6,rep,name=parameter_order,json=parameterOrder" json:"parameter_order,omitempty"` + Request *Request `protobuf:"bytes,7,opt,name=request" json:"request,omitempty"` + Response *Response `protobuf:"bytes,8,opt,name=response" json:"response,omitempty"` + Scopes []string `protobuf:"bytes,9,rep,name=scopes" json:"scopes,omitempty"` + SupportsMediaDownload bool `protobuf:"varint,10,opt,name=supports_media_download,json=supportsMediaDownload" json:"supports_media_download,omitempty"` + SupportsMediaUpload bool `protobuf:"varint,11,opt,name=supports_media_upload,json=supportsMediaUpload" 
json:"supports_media_upload,omitempty"` + UseMediaDownloadService bool `protobuf:"varint,12,opt,name=use_media_download_service,json=useMediaDownloadService" json:"use_media_download_service,omitempty"` + MediaUpload *MediaUpload `protobuf:"bytes,13,opt,name=media_upload,json=mediaUpload" json:"media_upload,omitempty"` + SupportsSubscription bool `protobuf:"varint,14,opt,name=supports_subscription,json=supportsSubscription" json:"supports_subscription,omitempty"` + FlatPath string `protobuf:"bytes,15,opt,name=flat_path,json=flatPath" json:"flat_path,omitempty"` + EtagRequired bool `protobuf:"varint,16,opt,name=etag_required,json=etagRequired" json:"etag_required,omitempty"` +} + +func (m *Method) Reset() { *m = Method{} } +func (m *Method) String() string { return proto.CompactTextString(m) } +func (*Method) ProtoMessage() {} +func (*Method) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *Method) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Method) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *Method) GetHttpMethod() string { + if m != nil { + return m.HttpMethod + } + return "" +} + +func (m *Method) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Method) GetParameters() *Parameters { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Method) GetParameterOrder() []string { + if m != nil { + return m.ParameterOrder + } + return nil +} + +func (m *Method) GetRequest() *Request { + if m != nil { + return m.Request + } + return nil +} + +func (m *Method) GetResponse() *Response { + if m != nil { + return m.Response + } + return nil +} + +func (m *Method) GetScopes() []string { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *Method) GetSupportsMediaDownload() bool { + if m != nil { + return m.SupportsMediaDownload + } + return false +} + +func (m *Method) GetSupportsMediaUpload() bool { + if m != nil { + return m.SupportsMediaUpload + } + return false +} + +func (m *Method) GetUseMediaDownloadService() bool { + if m != nil { + return m.UseMediaDownloadService + } + return false +} + +func (m *Method) GetMediaUpload() *MediaUpload { + if m != nil { + return m.MediaUpload + } + return nil +} + +func (m *Method) GetSupportsSubscription() bool { + if m != nil { + return m.SupportsSubscription + } + return false +} + +func (m *Method) GetFlatPath() string { + if m != nil { + return m.FlatPath + } + return "" +} + +func (m *Method) GetEtagRequired() bool { + if m != nil { + return m.EtagRequired + } + return false +} + +type Methods struct { + AdditionalProperties []*NamedMethod `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Methods) Reset() { *m = Methods{} } +func (m *Methods) String() string { return proto.CompactTextString(m) } +func (*Methods) ProtoMessage() {} +func (*Methods) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *Methods) GetAdditionalProperties() []*NamedMethod { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +// Automatically-generated message used to represent maps of Method as ordered (name,value) pairs. 
+type NamedMethod struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Method `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedMethod) Reset() { *m = NamedMethod{} } +func (m *NamedMethod) String() string { return proto.CompactTextString(m) } +func (*NamedMethod) ProtoMessage() {} +func (*NamedMethod) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *NamedMethod) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedMethod) GetValue() *Method { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +type NamedParameter struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Parameter `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedParameter) Reset() { *m = NamedParameter{} } +func (m *NamedParameter) String() string { return proto.CompactTextString(m) } +func (*NamedParameter) ProtoMessage() {} +func (*NamedParameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +func (m *NamedParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedParameter) GetValue() *Parameter { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Resource as ordered (name,value) pairs. +type NamedResource struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Resource `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedResource) Reset() { *m = NamedResource{} } +func (m *NamedResource) String() string { return proto.CompactTextString(m) } +func (*NamedResource) ProtoMessage() {} +func (*NamedResource) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *NamedResource) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedResource) GetValue() *Resource { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +type NamedSchema struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Schema `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedSchema) Reset() { *m = NamedSchema{} } +func (m *NamedSchema) String() string { return proto.CompactTextString(m) } +func (*NamedSchema) ProtoMessage() {} +func (*NamedSchema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +func (m *NamedSchema) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedSchema) GetValue() *Schema { + if m != nil { + return m.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Scope as ordered (name,value) pairs. 
+type NamedScope struct { + // Map key + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Mapped value + Value *Scope `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *NamedScope) Reset() { *m = NamedScope{} } +func (m *NamedScope) String() string { return proto.CompactTextString(m) } +func (*NamedScope) ProtoMessage() {} +func (*NamedScope) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +func (m *NamedScope) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NamedScope) GetValue() *Scope { + if m != nil { + return m.Value + } + return nil +} + +type Oauth2 struct { + Scopes *Scopes `protobuf:"bytes,1,opt,name=scopes" json:"scopes,omitempty"` +} + +func (m *Oauth2) Reset() { *m = Oauth2{} } +func (m *Oauth2) String() string { return proto.CompactTextString(m) } +func (*Oauth2) ProtoMessage() {} +func (*Oauth2) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +func (m *Oauth2) GetScopes() *Scopes { + if m != nil { + return m.Scopes + } + return nil +} + +type Parameter struct { + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + Type string `protobuf:"bytes,2,opt,name=type" json:"type,omitempty"` + XRef string `protobuf:"bytes,3,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` + Default string `protobuf:"bytes,5,opt,name=default" json:"default,omitempty"` + Required bool `protobuf:"varint,6,opt,name=required" json:"required,omitempty"` + Format string `protobuf:"bytes,7,opt,name=format" json:"format,omitempty"` + Pattern string `protobuf:"bytes,8,opt,name=pattern" json:"pattern,omitempty"` + Minimum string `protobuf:"bytes,9,opt,name=minimum" json:"minimum,omitempty"` + Maximum string `protobuf:"bytes,10,opt,name=maximum" json:"maximum,omitempty"` + Enum []string `protobuf:"bytes,11,rep,name=enum" json:"enum,omitempty"` + EnumDescriptions []string `protobuf:"bytes,12,rep,name=enum_descriptions,json=enumDescriptions" json:"enum_descriptions,omitempty"` + Repeated bool `protobuf:"varint,13,opt,name=repeated" json:"repeated,omitempty"` + Location string `protobuf:"bytes,14,opt,name=location" json:"location,omitempty"` + Properties *Schemas `protobuf:"bytes,15,opt,name=properties" json:"properties,omitempty"` + AdditionalProperties *Schema `protobuf:"bytes,16,opt,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` + Items *Schema `protobuf:"bytes,17,opt,name=items" json:"items,omitempty"` + Annotations *Annotations `protobuf:"bytes,18,opt,name=annotations" json:"annotations,omitempty"` +} + +func (m *Parameter) Reset() { *m = Parameter{} } +func (m *Parameter) String() string { return proto.CompactTextString(m) } +func (*Parameter) ProtoMessage() {} +func (*Parameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +func (m *Parameter) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Parameter) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Parameter) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *Parameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Parameter) GetDefault() string { + if m != nil { + return m.Default + } + return "" +} + +func (m *Parameter) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *Parameter) 
GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *Parameter) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *Parameter) GetMinimum() string { + if m != nil { + return m.Minimum + } + return "" +} + +func (m *Parameter) GetMaximum() string { + if m != nil { + return m.Maximum + } + return "" +} + +func (m *Parameter) GetEnum() []string { + if m != nil { + return m.Enum + } + return nil +} + +func (m *Parameter) GetEnumDescriptions() []string { + if m != nil { + return m.EnumDescriptions + } + return nil +} + +func (m *Parameter) GetRepeated() bool { + if m != nil { + return m.Repeated + } + return false +} + +func (m *Parameter) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Parameter) GetProperties() *Schemas { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Parameter) GetAdditionalProperties() *Schema { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +func (m *Parameter) GetItems() *Schema { + if m != nil { + return m.Items + } + return nil +} + +func (m *Parameter) GetAnnotations() *Annotations { + if m != nil { + return m.Annotations + } + return nil +} + +type Parameters struct { + AdditionalProperties []*NamedParameter `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Parameters) Reset() { *m = Parameters{} } +func (m *Parameters) String() string { return proto.CompactTextString(m) } +func (*Parameters) ProtoMessage() {} +func (*Parameters) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +func (m *Parameters) GetAdditionalProperties() []*NamedParameter { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Protocols struct { + Simple *Simple `protobuf:"bytes,1,opt,name=simple" json:"simple,omitempty"` + Resumable *Resumable `protobuf:"bytes,2,opt,name=resumable" json:"resumable,omitempty"` +} + +func (m *Protocols) Reset() { *m = Protocols{} } +func (m *Protocols) String() string { return proto.CompactTextString(m) } +func (*Protocols) ProtoMessage() {} +func (*Protocols) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +func (m *Protocols) GetSimple() *Simple { + if m != nil { + return m.Simple + } + return nil +} + +func (m *Protocols) GetResumable() *Resumable { + if m != nil { + return m.Resumable + } + return nil +} + +type Request struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + ParameterName string `protobuf:"bytes,2,opt,name=parameter_name,json=parameterName" json:"parameter_name,omitempty"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +func (m *Request) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *Request) GetParameterName() string { + if m != nil { + return m.ParameterName + } + return "" +} + +type Resource struct { + Methods *Methods `protobuf:"bytes,1,opt,name=methods" json:"methods,omitempty"` + Resources *Resources `protobuf:"bytes,2,opt,name=resources" json:"resources,omitempty"` +} + +func (m *Resource) Reset() { *m = Resource{} } +func (m *Resource) String() string { return proto.CompactTextString(m) } +func (*Resource) ProtoMessage() {} +func (*Resource) Descriptor() ([]byte, []int) { 
return fileDescriptor0, []int{18} } + +func (m *Resource) GetMethods() *Methods { + if m != nil { + return m.Methods + } + return nil +} + +func (m *Resource) GetResources() *Resources { + if m != nil { + return m.Resources + } + return nil +} + +type Resources struct { + AdditionalProperties []*NamedResource `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Resources) Reset() { *m = Resources{} } +func (m *Resources) String() string { return proto.CompactTextString(m) } +func (*Resources) ProtoMessage() {} +func (*Resources) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *Resources) GetAdditionalProperties() []*NamedResource { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Response struct { + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref" json:"_ref,omitempty"` +} + +func (m *Response) Reset() { *m = Response{} } +func (m *Response) String() string { return proto.CompactTextString(m) } +func (*Response) ProtoMessage() {} +func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *Response) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +type Resumable struct { + Multipart bool `protobuf:"varint,1,opt,name=multipart" json:"multipart,omitempty"` + Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` +} + +func (m *Resumable) Reset() { *m = Resumable{} } +func (m *Resumable) String() string { return proto.CompactTextString(m) } +func (*Resumable) ProtoMessage() {} +func (*Resumable) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } + +func (m *Resumable) GetMultipart() bool { + if m != nil { + return m.Multipart + } + return false +} + +func (m *Resumable) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +type Schema struct { + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + Type string `protobuf:"bytes,2,opt,name=type" json:"type,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + Default string `protobuf:"bytes,4,opt,name=default" json:"default,omitempty"` + Required bool `protobuf:"varint,5,opt,name=required" json:"required,omitempty"` + Format string `protobuf:"bytes,6,opt,name=format" json:"format,omitempty"` + Pattern string `protobuf:"bytes,7,opt,name=pattern" json:"pattern,omitempty"` + Minimum string `protobuf:"bytes,8,opt,name=minimum" json:"minimum,omitempty"` + Maximum string `protobuf:"bytes,9,opt,name=maximum" json:"maximum,omitempty"` + Enum []string `protobuf:"bytes,10,rep,name=enum" json:"enum,omitempty"` + EnumDescriptions []string `protobuf:"bytes,11,rep,name=enum_descriptions,json=enumDescriptions" json:"enum_descriptions,omitempty"` + Repeated bool `protobuf:"varint,12,opt,name=repeated" json:"repeated,omitempty"` + Location string `protobuf:"bytes,13,opt,name=location" json:"location,omitempty"` + Properties *Schemas `protobuf:"bytes,14,opt,name=properties" json:"properties,omitempty"` + AdditionalProperties *Schema `protobuf:"bytes,15,opt,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` + Items *Schema `protobuf:"bytes,16,opt,name=items" json:"items,omitempty"` + XRef string `protobuf:"bytes,17,opt,name=_ref,json=Ref" json:"_ref,omitempty"` + Annotations *Annotations `protobuf:"bytes,18,opt,name=annotations" json:"annotations,omitempty"` + ReadOnly bool 
`protobuf:"varint,19,opt,name=read_only,json=readOnly" json:"read_only,omitempty"` +} + +func (m *Schema) Reset() { *m = Schema{} } +func (m *Schema) String() string { return proto.CompactTextString(m) } +func (*Schema) ProtoMessage() {} +func (*Schema) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } + +func (m *Schema) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Schema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Schema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Schema) GetDefault() string { + if m != nil { + return m.Default + } + return "" +} + +func (m *Schema) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *Schema) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *Schema) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *Schema) GetMinimum() string { + if m != nil { + return m.Minimum + } + return "" +} + +func (m *Schema) GetMaximum() string { + if m != nil { + return m.Maximum + } + return "" +} + +func (m *Schema) GetEnum() []string { + if m != nil { + return m.Enum + } + return nil +} + +func (m *Schema) GetEnumDescriptions() []string { + if m != nil { + return m.EnumDescriptions + } + return nil +} + +func (m *Schema) GetRepeated() bool { + if m != nil { + return m.Repeated + } + return false +} + +func (m *Schema) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Schema) GetProperties() *Schemas { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Schema) GetAdditionalProperties() *Schema { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +func (m *Schema) GetItems() *Schema { + if m != nil { + return m.Items + } + return nil +} + +func (m *Schema) GetXRef() string { + if m != nil { + return m.XRef + } + return "" +} + +func (m *Schema) GetAnnotations() *Annotations { + if m != nil { + return m.Annotations + } + return nil +} + +func (m *Schema) GetReadOnly() bool { + if m != nil { + return m.ReadOnly + } + return false +} + +type Schemas struct { + AdditionalProperties []*NamedSchema `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Schemas) Reset() { *m = Schemas{} } +func (m *Schemas) String() string { return proto.CompactTextString(m) } +func (*Schemas) ProtoMessage() {} +func (*Schemas) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } + +func (m *Schemas) GetAdditionalProperties() []*NamedSchema { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Scope struct { + Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"` +} + +func (m *Scope) Reset() { *m = Scope{} } +func (m *Scope) String() string { return proto.CompactTextString(m) } +func (*Scope) ProtoMessage() {} +func (*Scope) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } + +func (m *Scope) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type Scopes struct { + AdditionalProperties []*NamedScope `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties" json:"additional_properties,omitempty"` +} + +func (m *Scopes) Reset() { *m = Scopes{} } +func (m *Scopes) String() string { return proto.CompactTextString(m) } +func (*Scopes) ProtoMessage() {} +func 
(*Scopes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } + +func (m *Scopes) GetAdditionalProperties() []*NamedScope { + if m != nil { + return m.AdditionalProperties + } + return nil +} + +type Simple struct { + Multipart bool `protobuf:"varint,1,opt,name=multipart" json:"multipart,omitempty"` + Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` +} + +func (m *Simple) Reset() { *m = Simple{} } +func (m *Simple) String() string { return proto.CompactTextString(m) } +func (*Simple) ProtoMessage() {} +func (*Simple) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } + +func (m *Simple) GetMultipart() bool { + if m != nil { + return m.Multipart + } + return false +} + +func (m *Simple) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +type StringArray struct { + Value []string `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` +} + +func (m *StringArray) Reset() { *m = StringArray{} } +func (m *StringArray) String() string { return proto.CompactTextString(m) } +func (*StringArray) ProtoMessage() {} +func (*StringArray) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{27} } + +func (m *StringArray) GetValue() []string { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Annotations)(nil), "discovery.v1.Annotations") + proto.RegisterType((*Any)(nil), "discovery.v1.Any") + proto.RegisterType((*Auth)(nil), "discovery.v1.Auth") + proto.RegisterType((*Document)(nil), "discovery.v1.Document") + proto.RegisterType((*Icons)(nil), "discovery.v1.Icons") + proto.RegisterType((*MediaUpload)(nil), "discovery.v1.MediaUpload") + proto.RegisterType((*Method)(nil), "discovery.v1.Method") + proto.RegisterType((*Methods)(nil), "discovery.v1.Methods") + proto.RegisterType((*NamedMethod)(nil), "discovery.v1.NamedMethod") + proto.RegisterType((*NamedParameter)(nil), "discovery.v1.NamedParameter") + proto.RegisterType((*NamedResource)(nil), "discovery.v1.NamedResource") + proto.RegisterType((*NamedSchema)(nil), "discovery.v1.NamedSchema") + proto.RegisterType((*NamedScope)(nil), "discovery.v1.NamedScope") + proto.RegisterType((*Oauth2)(nil), "discovery.v1.Oauth2") + proto.RegisterType((*Parameter)(nil), "discovery.v1.Parameter") + proto.RegisterType((*Parameters)(nil), "discovery.v1.Parameters") + proto.RegisterType((*Protocols)(nil), "discovery.v1.Protocols") + proto.RegisterType((*Request)(nil), "discovery.v1.Request") + proto.RegisterType((*Resource)(nil), "discovery.v1.Resource") + proto.RegisterType((*Resources)(nil), "discovery.v1.Resources") + proto.RegisterType((*Response)(nil), "discovery.v1.Response") + proto.RegisterType((*Resumable)(nil), "discovery.v1.Resumable") + proto.RegisterType((*Schema)(nil), "discovery.v1.Schema") + proto.RegisterType((*Schemas)(nil), "discovery.v1.Schemas") + proto.RegisterType((*Scope)(nil), "discovery.v1.Scope") + proto.RegisterType((*Scopes)(nil), "discovery.v1.Scopes") + proto.RegisterType((*Simple)(nil), "discovery.v1.Simple") + proto.RegisterType((*StringArray)(nil), "discovery.v1.StringArray") +} + +func init() { proto.RegisterFile("discovery/discovery.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 1683 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xa4, 0x58, 0x5f, 0x53, 0xdc, 0xc8, + 0x11, 0xaf, 0x85, 0xfd, 0xdb, 0xbb, 0x0b, 0xcb, 0xf0, 0x4f, 0x60, 0x9c, 0x23, 0xba, 0xba, 0x04, + 0x72, 0x77, 0x50, 0xc6, 0x39, 0x57, 0x2a, 0x4e, 0x1e, 0x70, 0xe0, 0xc1, 0x95, 
0x60, 0xb0, 0x28, + 0x27, 0x95, 0x87, 0x94, 0x6a, 0x90, 0x66, 0x59, 0x15, 0x92, 0x46, 0x19, 0x8d, 0x30, 0xeb, 0x8f, + 0x93, 0xaa, 0xbc, 0x25, 0xdf, 0x2a, 0x9f, 0x23, 0x75, 0x35, 0x3d, 0x23, 0xad, 0x16, 0xb4, 0x6b, + 0x8c, 0x9f, 0x98, 0xee, 0x5f, 0x77, 0x6b, 0xa6, 0xa7, 0xfb, 0x37, 0xbd, 0xc0, 0x96, 0x1f, 0xa4, + 0x1e, 0xbf, 0x65, 0x62, 0x7c, 0x58, 0xac, 0x0e, 0x12, 0xc1, 0x25, 0x27, 0xbd, 0x89, 0xe2, 0xf6, + 0xc5, 0xf6, 0xd6, 0x35, 0xe7, 0xd7, 0x21, 0x3b, 0x44, 0xec, 0x2a, 0x1b, 0x1e, 0xd2, 0xd8, 0x18, + 0xda, 0xfb, 0xd0, 0x3d, 0x8e, 0x63, 0x2e, 0xa9, 0x0c, 0x78, 0x9c, 0x92, 0x6d, 0x68, 0x0b, 0xf6, + 0xcf, 0x2c, 0x10, 0xcc, 0xb7, 0x6a, 0xbb, 0x8b, 0x7b, 0x1d, 0xa7, 0x90, 0xed, 0x53, 0x58, 0x3c, + 0x8e, 0xc7, 0xe4, 0x37, 0xd0, 0xb8, 0xa5, 0x61, 0xc6, 0xac, 0xda, 0x6e, 0x6d, 0xaf, 0x7b, 0xb4, + 0x76, 0xa0, 0x83, 0x1f, 0xe4, 0xc1, 0x0f, 0x8e, 0xe3, 0xb1, 0xa3, 0x4d, 0x08, 0x81, 0xfa, 0x98, + 0x46, 0xa1, 0xb5, 0xb0, 0x5b, 0xdb, 0xeb, 0x38, 0xb8, 0xb6, 0x7f, 0x0b, 0xf5, 0xe3, 0x4c, 0x8e, + 0xc8, 0x0f, 0xd0, 0xe4, 0x34, 0x93, 0xa3, 0xa3, 0x22, 0x50, 0x79, 0xcf, 0x07, 0xe7, 0x88, 0x39, + 0xc6, 0xc6, 0xfe, 0x77, 0x1b, 0xda, 0x27, 0xdc, 0xcb, 0x22, 0x16, 0x4b, 0x15, 0xf6, 0x26, 0x88, + 0x7d, 0x74, 0xec, 0x38, 0xb8, 0x26, 0xdf, 0xc3, 0x4a, 0xe1, 0xef, 0xde, 0x32, 0x91, 0x06, 0x3c, + 0x36, 0xdf, 0x1d, 0x14, 0xc0, 0x5f, 0xb5, 0x9e, 0x2c, 0xc1, 0x42, 0xe0, 0x5b, 0x8b, 0x88, 0x2e, + 0x04, 0xbe, 0x0a, 0x18, 0xd3, 0x88, 0x59, 0x75, 0x1d, 0x50, 0xad, 0x89, 0x05, 0xad, 0x3c, 0x4c, + 0x03, 0xd5, 0xb9, 0xa8, 0x93, 0x74, 0x1b, 0x20, 0xd4, 0x44, 0xa8, 0x90, 0xc9, 0x1a, 0x34, 0x64, + 0x20, 0x43, 0x66, 0xb5, 0x10, 0xd0, 0x02, 0xd9, 0x85, 0xae, 0xcf, 0x52, 0x4f, 0x04, 0x89, 0x4a, + 0xb3, 0xd5, 0x46, 0xac, 0xac, 0x22, 0xfb, 0xd0, 0x08, 0x3c, 0x1e, 0xa7, 0x56, 0x07, 0x93, 0xb1, + 0x3a, 0x9d, 0x8c, 0xb7, 0x0a, 0x72, 0xb4, 0x05, 0xf9, 0x11, 0x88, 0x6f, 0x32, 0x81, 0xb7, 0xe6, + 0x86, 0x41, 0x7c, 0x63, 0x01, 0xc6, 0x5c, 0x99, 0x42, 0xfe, 0x12, 0xc4, 0x37, 0x64, 0x03, 0x9a, + 0x21, 0xbd, 0x62, 0x61, 0x6a, 0x75, 0xf1, 0x42, 0x8d, 0xa4, 0x4e, 0x81, 0x57, 0xe6, 0xf1, 0xd0, + 0xea, 0xe9, 0x53, 0xe4, 0x32, 0xd9, 0x82, 0xf6, 0x15, 0x4d, 0x99, 0x9b, 0x89, 0xd0, 0xea, 0xeb, + 0xc3, 0x2b, 0xf9, 0x83, 0x08, 0xc9, 0x33, 0xe8, 0x20, 0x94, 0x50, 0x39, 0xb2, 0x96, 0xb4, 0x9f, + 0x52, 0x5c, 0x50, 0x39, 0x52, 0x7e, 0x82, 0x73, 0x89, 0x7e, 0xcb, 0xda, 0x4f, 0xc9, 0xca, 0xef, + 0x97, 0xd0, 0x4b, 0x99, 0xb8, 0x0d, 0x3c, 0xe3, 0x3a, 0xd0, 0x39, 0x30, 0x3a, 0xf4, 0x7e, 0x0e, + 0x70, 0x45, 0xa5, 0x37, 0xd2, 0x06, 0x2b, 0x68, 0xd0, 0x41, 0x0d, 0xc2, 0xbf, 0x03, 0x48, 0xa8, + 0xa0, 0x11, 0x93, 0x4c, 0xa4, 0x16, 0xc1, 0x3c, 0x59, 0xd3, 0x79, 0xba, 0x28, 0x70, 0xa7, 0x64, + 0x4b, 0x7e, 0x05, 0x75, 0x55, 0x45, 0xd6, 0x2a, 0xfa, 0x90, 0x69, 0x1f, 0x55, 0x8c, 0x0e, 0xe2, + 0x2a, 0x25, 0x43, 0x46, 0x65, 0x26, 0x58, 0x6a, 0xad, 0xe9, 0xea, 0xcf, 0x65, 0x72, 0x08, 0xad, + 0xd4, 0x1b, 0xb1, 0x88, 0xa6, 0xd6, 0x3a, 0x86, 0x59, 0x9f, 0x0e, 0x73, 0xa9, 0x41, 0x27, 0xb7, + 0x52, 0x0e, 0x11, 0x93, 0x23, 0xee, 0xa7, 0xd6, 0x46, 0x95, 0xc3, 0x99, 0x06, 0x9d, 0xdc, 0x8a, + 0xfc, 0x04, 0x1d, 0xc1, 0x52, 0x9e, 0x09, 0x8f, 0xa5, 0xd6, 0x26, 0xba, 0x6c, 0x4e, 0xbb, 0x38, + 0x39, 0xec, 0x4c, 0x2c, 0x55, 0xed, 0x32, 0x49, 0xaf, 0x2d, 0x4b, 0xd7, 0xae, 0x5a, 0xab, 0x64, + 0xf3, 0x8f, 0x31, 0x13, 0xae, 0xcf, 0x23, 0x1a, 0xc4, 0xd6, 0x96, 0x4e, 0x36, 0xea, 0x4e, 0x50, + 0xa5, 0x92, 0xad, 0x4d, 0xb0, 0xf0, 0xb7, 0x75, 0xb2, 0x51, 0xf3, 0x4e, 0x55, 0xff, 0x77, 0xb0, + 0x64, 0xca, 0xdd, 0x8d, 0xb8, 0x9f, 0x85, 0xcc, 0x7a, 0xb6, 0x5b, 0xdb, 0x6b, 0x3b, 0x7d, 0xa3, + 0x3d, 
0x43, 0xa5, 0x32, 0xf3, 0x68, 0xcc, 0xe3, 0xc0, 0xa3, 0xa1, 0x8e, 0xb4, 0x83, 0x91, 0xfa, + 0x85, 0x16, 0xa3, 0x9d, 0xc2, 0x37, 0xc3, 0x2c, 0x0c, 0xc7, 0x2e, 0x8b, 0x3d, 0xee, 0x33, 0x57, + 0x30, 0x75, 0xef, 0xcc, 0x77, 0xd9, 0x5d, 0x42, 0x63, 0x6c, 0xa4, 0xe7, 0x18, 0x7e, 0x07, 0xcd, + 0x4e, 0xd1, 0xca, 0x31, 0x46, 0xa7, 0xb9, 0x8d, 0x3a, 0x56, 0x42, 0xbd, 0x1b, 0x7a, 0x6d, 0x6a, + 0xe8, 0x17, 0xfa, 0x58, 0x46, 0xa7, 0x8a, 0xc4, 0xfe, 0x1e, 0x1a, 0xd8, 0x2c, 0x64, 0x00, 0x8b, + 0x77, 0x2f, 0x5e, 0x19, 0x8a, 0x50, 0x4b, 0xd4, 0xbc, 0x3c, 0x32, 0x9c, 0xa0, 0x96, 0xf6, 0x7f, + 0x6a, 0xd0, 0x3d, 0x63, 0x7e, 0x40, 0x3f, 0x24, 0x21, 0xa7, 0xbe, 0x6a, 0x15, 0xea, 0x79, 0x2c, + 0x91, 0x86, 0xfb, 0x8c, 0xa4, 0xca, 0x3a, 0xa2, 0x77, 0x6e, 0x1a, 0x7c, 0x62, 0xc6, 0xbd, 0x15, + 0xd1, 0xbb, 0xcb, 0xe0, 0x13, 0x53, 0x97, 0x96, 0x77, 0x4d, 0x8a, 0x84, 0xf2, 0xe0, 0xd2, 0x2e, + 0x72, 0xd8, 0x99, 0x58, 0x92, 0x97, 0xb0, 0x9e, 0x66, 0x49, 0xc2, 0x85, 0x4c, 0xdd, 0x34, 0xbb, + 0x9a, 0x50, 0x43, 0x1d, 0xd3, 0xb0, 0x96, 0x83, 0x97, 0x25, 0xcc, 0xfe, 0x6f, 0x03, 0x9a, 0xba, + 0x6a, 0x0c, 0x81, 0xd5, 0xca, 0x04, 0x86, 0x19, 0x31, 0x44, 0xab, 0xd6, 0xe4, 0x1b, 0xe8, 0x8e, + 0xa4, 0x4c, 0x5c, 0x5d, 0x5f, 0x86, 0xed, 0x40, 0xa9, 0x4c, 0x90, 0x7b, 0xac, 0x54, 0x7f, 0xc8, + 0x4a, 0xd3, 0x2d, 0xd7, 0xf8, 0x82, 0x96, 0xfb, 0x35, 0x2c, 0x17, 0x92, 0xcb, 0x85, 0xcf, 0x84, + 0xd5, 0xc4, 0x9c, 0x2e, 0x15, 0xea, 0x73, 0xa5, 0x55, 0x6d, 0xa2, 0x5e, 0x18, 0x96, 0x4a, 0xa4, + 0xcc, 0x07, 0x6d, 0xe2, 0x68, 0xd0, 0xc9, 0xad, 0xc8, 0x91, 0x62, 0xdf, 0x34, 0xe1, 0x71, 0xca, + 0x90, 0x48, 0xbb, 0x47, 0x1b, 0x0f, 0xba, 0x04, 0x51, 0xa7, 0xb0, 0x53, 0x17, 0x9b, 0x7a, 0x3c, + 0x61, 0x8a, 0x5e, 0xf1, 0x62, 0xb5, 0x44, 0x5e, 0xc1, 0x66, 0x71, 0x0d, 0x91, 0x2a, 0x04, 0xd7, + 0xe7, 0x1f, 0x63, 0x55, 0x0b, 0xc8, 0xa7, 0x6d, 0xa7, 0xb8, 0x25, 0x2c, 0x93, 0x13, 0x03, 0x92, + 0xa3, 0xd2, 0xf5, 0x69, 0xbf, 0x0c, 0x2b, 0xc8, 0xea, 0xa2, 0xd7, 0xea, 0x94, 0x97, 0x29, 0xae, + 0xd7, 0xb0, 0x9d, 0xa5, 0xec, 0xde, 0x67, 0x5c, 0xc3, 0x7f, 0xc8, 0xc0, 0x6d, 0x67, 0x33, 0x4b, + 0xd9, 0xd4, 0x97, 0x2e, 0x35, 0x4c, 0xfe, 0x00, 0xbd, 0xa9, 0xef, 0xf4, 0xf1, 0xe0, 0x5b, 0xf7, + 0x19, 0xa5, 0xf8, 0x9a, 0xd3, 0x8d, 0x4a, 0x9f, 0x9e, 0x59, 0x6d, 0x4b, 0xb3, 0xab, 0x4d, 0x11, + 0xfd, 0x30, 0xa4, 0x52, 0x77, 0x9a, 0x26, 0xf3, 0xb6, 0x52, 0x20, 0x17, 0x7f, 0x0b, 0x7d, 0x45, + 0x34, 0x6e, 0x31, 0x2c, 0x0c, 0x30, 0x52, 0x4f, 0x29, 0x9d, 0x7c, 0x60, 0xf8, 0x3b, 0xb4, 0x0c, + 0xc9, 0x91, 0x77, 0xb0, 0x4e, 0x7d, 0x3f, 0x50, 0x81, 0x69, 0xe8, 0x26, 0x82, 0x27, 0x4c, 0xc8, + 0x80, 0xa5, 0xd8, 0x68, 0x0f, 0x0e, 0xa2, 0x38, 0xc3, 0xd7, 0xae, 0xce, 0xda, 0xc4, 0xef, 0xa2, + 0x70, 0xb3, 0xcf, 0xa0, 0x5b, 0x32, 0x2a, 0xde, 0xef, 0x5a, 0xe9, 0xfd, 0x2e, 0xe6, 0x94, 0x85, + 0xaa, 0xf1, 0xc2, 0x44, 0xd7, 0x26, 0xf6, 0x25, 0x2c, 0x61, 0xb8, 0xa2, 0x98, 0x2b, 0x23, 0xfe, + 0x38, 0x1d, 0x71, 0x73, 0x46, 0x23, 0xe4, 0x41, 0xdf, 0x43, 0x1f, 0x83, 0xe6, 0xac, 0x5d, 0x19, + 0xf3, 0x87, 0xe9, 0x98, 0x1b, 0xd5, 0x84, 0x9f, 0x87, 0xcc, 0x8f, 0xad, 0x1f, 0x9b, 0x27, 0x1c, + 0x5b, 0x3b, 0xe6, 0xe1, 0xfe, 0x0c, 0x60, 0xc2, 0xf1, 0xa4, 0x7a, 0x7b, 0xfb, 0xd3, 0xd1, 0x56, + 0xef, 0x47, 0xe3, 0x49, 0xb1, 0xb7, 0x57, 0xd0, 0xd4, 0x33, 0x9b, 0x9a, 0xec, 0x4c, 0xb7, 0xd5, + 0xaa, 0xf7, 0xa0, 0xb0, 0xbc, 0x07, 0xed, 0xff, 0xd5, 0xa1, 0x33, 0xc9, 0x7b, 0x05, 0xb1, 0xc9, + 0x71, 0x92, 0xd3, 0x2e, 0xae, 0xc9, 0x0a, 0xd4, 0x5d, 0xc1, 0x86, 0x86, 0xd1, 0x16, 0x1d, 0x36, + 0x7c, 0x04, 0x95, 0x59, 0xd0, 0xf2, 0xd9, 0x90, 0x66, 0xa1, 0xcc, 0xc7, 0x39, 0x23, 0x4e, 0xcd, + 0xbc, 0x4d, 0x2c, 0xe3, 0x42, 
0x56, 0xc4, 0x31, 0xe4, 0x22, 0xa2, 0xd2, 0xcc, 0x73, 0x46, 0x52, + 0xd1, 0x12, 0x2a, 0x25, 0x13, 0xf9, 0x30, 0x97, 0x8b, 0x0a, 0x89, 0x82, 0x38, 0x88, 0xb2, 0x08, + 0x47, 0x39, 0xf5, 0x54, 0x68, 0x11, 0x11, 0x7a, 0x87, 0x08, 0x14, 0x8f, 0x08, 0x22, 0xea, 0x09, + 0x8f, 0xb3, 0xc8, 0x0c, 0x68, 0xb8, 0x56, 0xf3, 0xac, 0xfa, 0xeb, 0x96, 0xce, 0x90, 0x5a, 0x3d, + 0x34, 0x18, 0x28, 0xe0, 0xa4, 0xa4, 0xd7, 0x47, 0x48, 0x18, 0x95, 0x4c, 0x53, 0x03, 0x1e, 0x41, + 0xcb, 0x0a, 0x0b, 0xb9, 0x47, 0x8b, 0x7e, 0xef, 0x38, 0x85, 0x4c, 0x7e, 0x02, 0x28, 0xf5, 0xe2, + 0xf2, 0xbc, 0xb9, 0xa6, 0x64, 0x48, 0xde, 0xce, 0xea, 0xe6, 0xc1, 0x9c, 0x9a, 0xab, 0x6c, 0x64, + 0x55, 0xae, 0x81, 0x64, 0x51, 0x8a, 0xe3, 0xde, 0xcc, 0x72, 0x45, 0x13, 0xf2, 0x1a, 0xba, 0x74, + 0xf2, 0x5b, 0xc5, 0x4c, 0x80, 0xf7, 0xa8, 0xa3, 0xf4, 0x63, 0xc6, 0x29, 0x5b, 0xdb, 0x2e, 0xc0, + 0xe4, 0xa9, 0x22, 0xef, 0xe7, 0xf3, 0xd1, 0x4e, 0x05, 0x1f, 0x4d, 0xfa, 0xbb, 0x9a, 0x92, 0x12, + 0xe8, 0x14, 0x4f, 0x3d, 0xb6, 0x40, 0x10, 0x25, 0x21, 0x9b, 0xd1, 0x02, 0x88, 0x39, 0xc6, 0xc6, + 0x4c, 0x7e, 0x59, 0x44, 0xaf, 0xc2, 0x19, 0xe4, 0xe2, 0xe4, 0xb0, 0x33, 0xb1, 0xb4, 0xff, 0x04, + 0x2d, 0xf3, 0x3a, 0x16, 0x2d, 0x51, 0x9b, 0xb4, 0xc4, 0x77, 0x30, 0x79, 0x6a, 0xf5, 0x68, 0xa6, + 0x7b, 0xa8, 0x5f, 0x68, 0xd5, 0xb1, 0x6c, 0x01, 0xed, 0x82, 0xa0, 0x4a, 0x23, 0x6b, 0xed, 0xcb, + 0x47, 0xd6, 0x85, 0xc7, 0x8e, 0xac, 0xf6, 0x3f, 0xa0, 0x53, 0xe8, 0xc9, 0xc5, 0xfc, 0xab, 0x78, + 0x56, 0x71, 0x15, 0x05, 0x2d, 0x56, 0xdf, 0xc4, 0x73, 0x3c, 0x92, 0x7e, 0xf9, 0x1f, 0x26, 0xc6, + 0xfe, 0x23, 0x7e, 0x5d, 0xe7, 0x90, 0xec, 0x40, 0x27, 0xca, 0x42, 0x19, 0x24, 0x54, 0x48, 0x34, + 0x6a, 0x3b, 0x13, 0x45, 0xd5, 0x58, 0x65, 0xff, 0xbf, 0x0e, 0x4d, 0xc3, 0xbf, 0x8f, 0x21, 0xab, + 0x7b, 0xcc, 0xb4, 0x38, 0x97, 0x99, 0xea, 0xb3, 0x99, 0xa9, 0x31, 0x93, 0x99, 0x9a, 0xb3, 0x98, + 0xa9, 0x35, 0x93, 0x99, 0xda, 0x33, 0x99, 0xa9, 0x53, 0xcd, 0x4c, 0xf0, 0x39, 0x66, 0xea, 0x3e, + 0x82, 0x99, 0x7a, 0x73, 0x98, 0xa9, 0x3f, 0x97, 0x99, 0x96, 0xbe, 0x9a, 0x99, 0x96, 0x9f, 0xce, + 0x4c, 0x83, 0xcf, 0x33, 0x53, 0x5e, 0x65, 0x2b, 0x93, 0xf6, 0xfb, 0x1a, 0xb2, 0x52, 0xb3, 0x97, + 0x60, 0xd4, 0x77, 0x79, 0x1c, 0x8e, 0xf1, 0x57, 0x2b, 0xa6, 0x8d, 0xfa, 0xe7, 0x71, 0x38, 0x56, + 0x63, 0x95, 0x39, 0xfa, 0x53, 0xc6, 0xaa, 0x79, 0x67, 0xb6, 0xf7, 0xa1, 0xa1, 0x67, 0x81, 0x7b, + 0x55, 0x5b, 0x7b, 0x50, 0xb5, 0xf6, 0xdf, 0x54, 0x17, 0xe0, 0x10, 0x7d, 0x36, 0x7f, 0x13, 0x56, + 0xe5, 0x26, 0xd4, 0xe0, 0x50, 0xbd, 0x87, 0xdf, 0x43, 0x53, 0xd3, 0xe3, 0x13, 0x7a, 0xf3, 0x5b, + 0xe8, 0x5e, 0x4a, 0x11, 0xc4, 0xd7, 0xc7, 0x42, 0xd0, 0x31, 0x59, 0x9b, 0xfc, 0xab, 0x4a, 0x55, + 0xa7, 0x16, 0xde, 0xec, 0xc1, 0x80, 0x8b, 0xeb, 0x83, 0xd2, 0x7f, 0x8b, 0x5e, 0xbc, 0xe9, 0x9d, + 0x27, 0x2c, 0x3e, 0xbe, 0x78, 0x8b, 0x0c, 0x7e, 0x51, 0xfb, 0xd7, 0xc2, 0xe2, 0xf9, 0xf1, 0xe5, + 0x55, 0x13, 0x7f, 0xb0, 0xbd, 0xfc, 0x39, 0x00, 0x00, 0xff, 0xff, 0x0c, 0xae, 0x3c, 0xab, 0x69, + 0x13, 0x00, 0x00, +} diff --git a/vendor/github.com/googleapis/gnostic/discovery/discovery.proto b/vendor/github.com/googleapis/gnostic/discovery/discovery.proto new file mode 100644 index 000000000..862ee8894 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/discovery/discovery.proto @@ -0,0 +1,265 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +syntax = "proto3"; + +package discovery.v1; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIProto"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.discovery_v1"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +message Annotations { + repeated string required = 1; +} + +message Any { + google.protobuf.Any value = 1; + string yaml = 2; +} + +message Auth { + Oauth2 oauth2 = 1; +} + +message Document { + string kind = 1; + string discovery_version = 2; + string id = 3; + string name = 4; + string version = 5; + string revision = 6; + string title = 7; + string description = 8; + Icons icons = 9; + string documentation_link = 10; + repeated string labels = 11; + string protocol = 12; + string base_url = 13; + string base_path = 14; + string root_url = 15; + string service_path = 16; + string batch_path = 17; + Parameters parameters = 18; + Auth auth = 19; + repeated string features = 20; + Schemas schemas = 21; + Methods methods = 22; + Resources resources = 23; + string etag = 24; + string owner_domain = 25; + string owner_name = 26; + bool version_module = 27; + string canonical_name = 28; + bool fully_encode_reserved_expansion = 29; + string package_path = 30; +} + +// Icons that represent the API. +message Icons { + string x16 = 1; + string x32 = 2; +} + +message MediaUpload { + repeated string accept = 1; + string max_size = 2; + Protocols protocols = 3; + bool supports_subscription = 4; +} + +message Method { + string id = 1; + string path = 2; + string http_method = 3; + string description = 4; + Parameters parameters = 5; + repeated string parameter_order = 6; + Request request = 7; + Response response = 8; + repeated string scopes = 9; + bool supports_media_download = 10; + bool supports_media_upload = 11; + bool use_media_download_service = 12; + MediaUpload media_upload = 13; + bool supports_subscription = 14; + string flat_path = 15; + bool etag_required = 16; +} + +message Methods { + repeated NamedMethod additional_properties = 1; +} + +// Automatically-generated message used to represent maps of Method as ordered (name,value) pairs. 
+message NamedMethod { + // Map key + string name = 1; + // Mapped value + Method value = 2; +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +message NamedParameter { + // Map key + string name = 1; + // Mapped value + Parameter value = 2; +} + +// Automatically-generated message used to represent maps of Resource as ordered (name,value) pairs. +message NamedResource { + // Map key + string name = 1; + // Mapped value + Resource value = 2; +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +message NamedSchema { + // Map key + string name = 1; + // Mapped value + Schema value = 2; +} + +// Automatically-generated message used to represent maps of Scope as ordered (name,value) pairs. +message NamedScope { + // Map key + string name = 1; + // Mapped value + Scope value = 2; +} + +message Oauth2 { + Scopes scopes = 1; +} + +message Parameter { + string id = 1; + string type = 2; + string _ref = 3; + string description = 4; + string default = 5; + bool required = 6; + string format = 7; + string pattern = 8; + string minimum = 9; + string maximum = 10; + repeated string enum = 11; + repeated string enum_descriptions = 12; + bool repeated = 13; + string location = 14; + Schemas properties = 15; + Schema additional_properties = 16; + Schema items = 17; + Annotations annotations = 18; +} + +message Parameters { + repeated NamedParameter additional_properties = 1; +} + +message Protocols { + Simple simple = 1; + Resumable resumable = 2; +} + +message Request { + string _ref = 1; + string parameter_name = 2; +} + +message Resource { + Methods methods = 1; + Resources resources = 2; +} + +message Resources { + repeated NamedResource additional_properties = 1; +} + +message Response { + string _ref = 1; +} + +message Resumable { + bool multipart = 1; + string path = 2; +} + +message Schema { + string id = 1; + string type = 2; + string description = 3; + string default = 4; + bool required = 5; + string format = 6; + string pattern = 7; + string minimum = 8; + string maximum = 9; + repeated string enum = 10; + repeated string enum_descriptions = 11; + bool repeated = 12; + string location = 13; + Schemas properties = 14; + Schema additional_properties = 15; + Schema items = 16; + string _ref = 17; + Annotations annotations = 18; + bool read_only = 19; +} + +message Schemas { + repeated NamedSchema additional_properties = 1; +} + +message Scope { + string description = 1; +} + +message Scopes { + repeated NamedScope additional_properties = 1; +} + +message Simple { + bool multipart = 1; + string path = 2; +} + +message StringArray { + repeated string value = 1; +} + diff --git a/vendor/github.com/googleapis/gnostic/discovery/discovery.yaml b/vendor/github.com/googleapis/gnostic/discovery/discovery.yaml new file mode 100644 index 000000000..9fa88b015 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/discovery/discovery.yaml @@ -0,0 +1,297 @@ +title: A JSON Schema for the Google API Discovery Format. +id: http://openapis.org/v3/schema.json# +$schema: http://json-schema.org/draft-04/schema# +type: object +description: This is the root document object of the Discovery Format. 
+required: +- kind +- discoveryVersion +additionalProperties: false +properties: + kind: + type: string + discoveryVersion: + type: string + id: + type: string + name: + type: string + version: + type: string + revision: + type: string + title: + type: string + description: + type: string + icons: + $ref: '#/definitions/icons' + documentationLink: + type: string + labels: + type: array + items: + type: string + protocol: + type: string + baseUrl: + type: string + basePath: + type: string + rootUrl: + type: string + servicePath: + type: string + batchPath: + type: string + parameters: + $ref: '#/definitions/parameters' + auth: + $ref: '#/definitions/auth' + features: + type: array + items: + type: string + schemas: + $ref: '#/definitions/schemas' + methods: + $ref: '#/definitions/methods' + resources: + $ref: '#/definitions/resources' + etag: + type: string + ownerDomain: + type: string + ownerName: + type: string + version_module: + type: boolean + canonicalName: + type: string + fullyEncodeReservedExpansion: + type: boolean + packagePath: + type: string +definitions: + icons: + type: object + description: Icons that represent the API. + required: + - x16 + - x32 + additionalProperties: false + properties: + x16: + type: string + x32: + type: string + parameters: + type: object + additionalProperties: + $ref: '#/definitions/parameter' + parameter: + type: object + properties: + id: + type: string + type: + type: string + $ref: + type: string + description: + type: string + default: + type: string + required: + type: boolean + format: + type: string + pattern: + type: string + minimum: + type: string + maximum: + type: string + enum: + type: array + items: + type: string + enumDescriptions: + type: array + items: + type: string + repeated: + type: boolean + location: + type: string + properties: + $ref: '#/definitions/schemas' + additionalProperties: + $ref: '#/definitions/schema' + items: + $ref: '#/definitions/schema' + annotations: + $ref: '#/definitions/annotations' + auth: + type: object + properties: + oauth2: + type: object + properties: + scopes: + $ref: '#/definitions/scopes' + schemas: + type: object + additionalProperties: + $ref: '#/definitions/schema' + schema: + type: object + properties: + id: + type: string + type: + type: string + description: + type: string + default: + type: string + required: + type: boolean + format: + type: string + pattern: + type: string + minimum: + type: string + maximum: + type: string + enum: + type: array + items: + type: string + enumDescriptions: + type: array + items: + type: string + repeated: + type: boolean + location: + type: string + properties: + $ref: '#/definitions/schemas' + additionalProperties: + $ref: '#/definitions/schema' + items: + $ref: '#/definitions/schema' + $ref: + type: string + annotations: + $ref: '#/definitions/annotations' + readOnly: + type: boolean + methods: + type: object + additionalProperties: + $ref: '#/definitions/method' + method: + type: object + properties: + id: + type: string + path: + type: string + httpMethod: + type: string + description: + type: string + parameters: + $ref: '#/definitions/parameters' + parameterOrder: + type: array + items: + type: string + request: + type: object + properties: + $ref: + type: string + parameterName: + type: string + response: + type: object + properties: + $ref: + type: string + scopes: + type: array + items: + type: string + supportsMediaDownload: + type: boolean + supportsMediaUpload: + type: boolean + useMediaDownloadService: + type: boolean + mediaUpload: + $ref: 
'#/definitions/mediaUpload' + supportsSubscription: + type: boolean + flatPath: + type: string + etagRequired: + type: boolean + resources: + type: object + additionalProperties: + $ref: '#/definitions/resource' + resource: + type: object + properties: + methods: + $ref: '#/definitions/methods' + resources: + $ref: '#/definitions/resources' + annotations: + type: object + properties: + required: + type: array + items: + type: string + scopes: + type: object + additionalProperties: + $ref: '#/definitions/scope' + scope: + type: object + properties: + description: + type: string + mediaUpload: + type: object + properties: + accept: + type: array + items: + type: string + maxSize: + type: string + protocols: + type: object + properties: + simple: + type: object + properties: + multipart: + type: boolean + path: + type: string + resumable: + type: object + properties: + multipart: + type: boolean + path: + type: string + supportsSubscription: + type: boolean diff --git a/vendor/github.com/googleapis/gnostic/examples/README.md b/vendor/github.com/googleapis/gnostic/examples/README.md new file mode 100644 index 000000000..987960000 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/README.md @@ -0,0 +1,3 @@ +# examples + +This directory contains example descriptions of APIs. diff --git a/vendor/github.com/googleapis/gnostic/examples/errors/petstore-badproperties.yaml b/vendor/github.com/googleapis/gnostic/examples/errors/petstore-badproperties.yaml new file mode 100644 index 000000000..2302a706c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/errors/petstore-badproperties.yaml @@ -0,0 +1,101 @@ +swagger: "2.0" +info: + title: Swagger Petstore + myproperty: 123 + license: + name: MIT +host: petstore.swagger.io +basePath: /v1 +schemes: + - http +consumes: + - application/json +produces: + - application/json +paths: + /pets: + get: + summary: List all pets + operationId: listPets + tags: + - pets + parameters: + - name: limit + in: query + description: How many items to return at one time (max 100) + required: false + type: integer + format: int32 + myproperty: 123 + responses: + "200": + description: An paged array of pets + headers: + x-next: + type: string + description: A link to the next page of responses + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + post: + summary: Create a pet + operationId: createPets + tags: pets + responses: + "201": + description: Null response + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + /pets/{petId}: + get: + summary: Info for a specific pet + operationId: showPetById + tags: + - pets + parameters: + - name: petId + in: path + required: true + description: The id of the pet to retrieve + type: string + responses: + "200": + description: Expected response to a valid request + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + Pet: + required: + - id + - name + properties: + id: + type: integer + format: int64 + name: + type: string + tag: + type: string + Pets: + type: array + items: + $ref: '#/definitions/Pet' + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/errors/petstore-missingversion.yaml b/vendor/github.com/googleapis/gnostic/examples/errors/petstore-missingversion.yaml new file mode 100644 index 
000000000..881722c99 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/errors/petstore-missingversion.yaml @@ -0,0 +1,100 @@ +info: + version: 1.0.0 + title: Swagger Petstore + license: + name: MIT +host: petstore.swagger.io +basePath: /v1 +schemes: + - http +consumes: + - application/json +produces: + - application/json +paths: + /pets: + get: + summary: List all pets + operationId: listPets + tags: + - pets + parameters: + - name: limit + in: query + description: How many items to return at one time (max 100) + required: false + type: integer + format: int32 + responses: + "200": + description: An paged array of pets + headers: + x-next: + type: string + description: A link to the next page of responses + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + post: + summary: Create a pet + operationId: createPets + tags: + - pets + responses: + "201": + description: Null response + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + /pets/{petId}: + get: + summary: Info for a specific pet + operationId: showPetById + tags: + - pets + parameters: + - name: petId + in: path + required: true + description: The id of the pet to retrieve + type: string + responses: + "200": + description: Expected response to a valid request + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + Pet: + required: + - id + - name + properties: + id: + type: integer + format: int64 + name: + type: string + tag: + type: string + Pets: + type: array + items: + $ref: '#/definitions/Pet' + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/errors/petstore-unresolvedrefs.yaml b/vendor/github.com/googleapis/gnostic/examples/errors/petstore-unresolvedrefs.yaml new file mode 100644 index 000000000..59c81e8de --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/errors/petstore-unresolvedrefs.yaml @@ -0,0 +1,101 @@ +swagger: "2.0" +info: + version: 1.0.0 + title: Swagger Petstore + license: + name: MIT +host: petstore.swagger.io +basePath: /v1 +schemes: + - http +consumes: + - application/json +produces: + - application/json +paths: + /pets: + get: + summary: List all pets + operationId: listPets + tags: + - pets + parameters: + - name: limit + in: query + description: How many items to return at one time (max 100) + required: false + type: integer + format: int32 + responses: + "200": + description: An paged array of pets + headers: + x-next: + type: string + description: A link to the next page of responses + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + post: + summary: Create a pet + operationId: createPets + tags: + - pets + responses: + "201": + description: Null response + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + /pets/{petId}: + get: + summary: Info for a specific pet + operationId: showPetById + tags: + - pets + parameters: + - name: petId + in: path + required: true + description: The id of the pet to retrieve + type: string + responses: + "200": + description: Expected response to a valid request + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + NotAPet: + required: + - id + - 
name + properties: + id: + type: integer + format: int64 + name: + type: string + tag: + type: string + Pets: + type: array + items: + $ref: '#/definitions/Pet' + NotAnError: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/api-with-examples.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/api-with-examples.json new file mode 100644 index 000000000..e1b371a07 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/api-with-examples.json @@ -0,0 +1,58 @@ +{ + "swagger": "2.0", + "info": { + "title": "Simple API overview", + "version": "v2" + }, + "paths": { + "/": { + "get": { + "operationId": "listVersionsv2", + "summary": "List API versions", + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "200 300 response", + "examples": { + "application/json": "{\n \"versions\": [\n {\n \"status\": \"CURRENT\",\n \"updated\": \"2011-01-21T11:33:21Z\",\n \"id\": \"v2.0\",\n \"links\": [\n {\n \"href\": \"http://127.0.0.1:8774/v2/\",\n \"rel\": \"self\"\n }\n ]\n },\n {\n \"status\": \"EXPERIMENTAL\",\n \"updated\": \"2013-07-23T11:33:21Z\",\n \"id\": \"v3.0\",\n \"links\": [\n {\n \"href\": \"http://127.0.0.1:8774/v3/\",\n \"rel\": \"self\"\n }\n ]\n }\n ]\n}" + } + }, + "300": { + "description": "200 300 response", + "examples": { + "application/json": "{\n \"versions\": [\n {\n \"status\": \"CURRENT\",\n \"updated\": \"2011-01-21T11:33:21Z\",\n \"id\": \"v2.0\",\n \"links\": [\n {\n \"href\": \"http://127.0.0.1:8774/v2/\",\n \"rel\": \"self\"\n }\n ]\n },\n {\n \"status\": \"EXPERIMENTAL\",\n \"updated\": \"2013-07-23T11:33:21Z\",\n \"id\": \"v3.0\",\n \"links\": [\n {\n \"href\": \"http://127.0.0.1:8774/v3/\",\n \"rel\": \"self\"\n }\n ]\n }\n ]\n}" + } + } + } + } + }, + "/v2": { + "get": { + "operationId": "getVersionDetailsv2", + "summary": "Show API version details", + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "200 203 response", + "examples": { + "application/json": "{\n \"version\": {\n \"status\": \"CURRENT\",\n \"updated\": \"2011-01-21T11:33:21Z\",\n \"media-types\": [\n {\n \"base\": \"application/xml\",\n \"type\": \"application/vnd.openstack.compute+xml;version=2\"\n },\n {\n \"base\": \"application/json\",\n \"type\": \"application/vnd.openstack.compute+json;version=2\"\n }\n ],\n \"id\": \"v2.0\",\n \"links\": [\n {\n \"href\": \"http://127.0.0.1:8774/v2/\",\n \"rel\": \"self\"\n },\n {\n \"href\": \"http://docs.openstack.org/api/openstack-compute/2/os-compute-devguide-2.pdf\",\n \"type\": \"application/pdf\",\n \"rel\": \"describedby\"\n },\n {\n \"href\": \"http://docs.openstack.org/api/openstack-compute/2/wadl/os-compute-2.wadl\",\n \"type\": \"application/vnd.sun.wadl+xml\",\n \"rel\": \"describedby\"\n },\n {\n \"href\": \"http://docs.openstack.org/api/openstack-compute/2/wadl/os-compute-2.wadl\",\n \"type\": \"application/vnd.sun.wadl+xml\",\n \"rel\": \"describedby\"\n }\n ]\n }\n}" + } + }, + "203": { + "description": "200 203 response", + "examples": { + "application/json": "{\n \"version\": {\n \"status\": \"CURRENT\",\n \"updated\": \"2011-01-21T11:33:21Z\",\n \"media-types\": [\n {\n \"base\": \"application/xml\",\n \"type\": \"application/vnd.openstack.compute+xml;version=2\"\n },\n {\n \"base\": \"application/json\",\n \"type\": \"application/vnd.openstack.compute+json;version=2\"\n }\n ],\n \"id\": \"v2.0\",\n \"links\": [\n 
{\n \"href\": \"http://23.253.228.211:8774/v2/\",\n \"rel\": \"self\"\n },\n {\n \"href\": \"http://docs.openstack.org/api/openstack-compute/2/os-compute-devguide-2.pdf\",\n \"type\": \"application/pdf\",\n \"rel\": \"describedby\"\n },\n {\n \"href\": \"http://docs.openstack.org/api/openstack-compute/2/wadl/os-compute-2.wadl\",\n \"type\": \"application/vnd.sun.wadl+xml\",\n \"rel\": \"describedby\"\n }\n ]\n }\n}" + } + } + } + } + } + }, + "consumes": [ + "application/json" + ] +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-expanded.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-expanded.json new file mode 100644 index 000000000..0c59f9a48 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-expanded.json @@ -0,0 +1,210 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore", + "description": "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "Swagger API Team", + "email": "foo@example.com", + "url": "http://madskristensen.net" + }, + "license": { + "name": "MIT", + "url": "http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT" + } + }, + "host": "petstore.swagger.io", + "basePath": "/api", + "schemes": [ + "http" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/pets": { + "get": { + "description": "Returns all pets from the system that the user has access to\nNam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam. Cras quis velit non tortor eleifend sagittis. Praesent at enim pharetra urna volutpat venenatis eget eget mauris. In eleifend fermentum facilisis. Praesent enim enim, gravida ac sodales sed, placerat id erat. Suspendisse lacus dolor, consectetur non augue vel, vehicula interdum libero. Morbi euismod sagittis libero sed lacinia.\n\nSed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellentesque posuere elementum. Sed a varius odio. Morbi rhoncus ligula libero, vel eleifend nunc tristique vitae. Fusce et sem dui. Aenean nec scelerisque tortor. Fusce malesuada accumsan magna vel tempus. Quisque mollis felis eu dolor tristique, sit amet auctor felis gravida. Sed libero lorem, molestie sed nisl in, accumsan tempor nisi. Fusce sollicitudin massa ut lacinia mattis. Sed vel eleifend lorem. 
Pellentesque vitae felis pretium, pulvinar elit eu, euismod sapien.\n", + "operationId": "findPets", + "parameters": [ + { + "name": "tags", + "in": "query", + "description": "tags to filter by", + "required": false, + "type": "array", + "collectionFormat": "csv", + "items": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "description": "maximum number of results to return", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "post": { + "description": "Creates a new pet in the store. Duplicates are allowed", + "operationId": "addPet", + "parameters": [ + { + "name": "pet", + "in": "body", + "description": "Pet to add to the store", + "required": true, + "schema": { + "$ref": "#/definitions/NewPet" + } + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/pets/{id}": { + "get": { + "description": "Returns a user based on a single ID, if the user does not have access to the pet", + "operationId": "find pet by id", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to fetch", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "delete": { + "description": "deletes a single pet based on the ID supplied", + "operationId": "deletePet", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to delete", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "204": { + "description": "pet deleted" + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Pet": { + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/NewPet" + }, + { + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + } + } + } + ] + }, + "NewPet": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, + "Error": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-minimal.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-minimal.json new file mode 100644 index 000000000..0c70baed7 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-minimal.json @@ -0,0 +1,68 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore", + "description": "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "Swagger API Team" + }, + 
"license": { + "name": "MIT" + } + }, + "host": "petstore.swagger.io", + "basePath": "/api", + "schemes": [ + "http" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/pets": { + "get": { + "description": "Returns all pets from the system that the user has access to", + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A list of pets.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + } + } + } + } + }, + "definitions": { + "Pet": { + "type": "object", + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/common/Error.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/common/Error.json new file mode 100644 index 000000000..dd0e65a0f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/common/Error.json @@ -0,0 +1,16 @@ +{ + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/NewPet.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/NewPet.json new file mode 100644 index 000000000..9104f7f68 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/NewPet.json @@ -0,0 +1,19 @@ +{ + "type": "object", + "allOf": [ + { + "$ref": "Pet.json" + }, + { + "required": [ + "name" + ], + "properties": { + "description": { + "type": "integer", + "format": "int64" + } + } + } + ] +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/Pet.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/Pet.json new file mode 100644 index 000000000..c7ee9fbb0 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/Pet.json @@ -0,0 +1,19 @@ +{ + "type": "object", + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/parameters.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/parameters.json new file mode 100644 index 000000000..a7c11b0a0 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/parameters.json @@ -0,0 +1,21 @@ +{ + "tagsParam": { + "name": "tags", + "in": "query", + "description": "tags to filter by", + "required": false, + "type": "array", + "collectionFormat": "csv", + "items": { + "type": "string" + } + }, + "limitsParam": { + "name": "limit", + "in": "query", + "description": "maximum number of results to return", + "required": false, + "type": "integer", + "format": "int32" + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/swagger.json 
b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/swagger.json new file mode 100644 index 000000000..542b76fc2 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-separate/spec/swagger.json @@ -0,0 +1,146 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore", + "description": "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification", + "termsOfService": "http://helloreverb.com/terms/", + "contact": { + "name": "Wordnik API Team", + "email": "foo@example.com", + "url": "http://madskristensen.net" + }, + "license": { + "name": "MIT", + "url": "http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT" + } + }, + "host": "petstore.swagger.wordnik.com", + "basePath": "/api", + "schemes": [ + "http" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/pets": { + "get": { + "description": "Returns all pets from the system that the user has access to\nNam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam. Cras quis velit non tortor eleifend sagittis. Praesent at enim pharetra urna volutpat venenatis eget eget mauris. In eleifend fermentum facilisis. Praesent enim enim, gravida ac sodales sed, placerat id erat. Suspendisse lacus dolor, consectetur non augue vel, vehicula interdum libero. Morbi euismod sagittis libero sed lacinia.\n\nSed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellentesque posuere elementum. Sed a varius odio. Morbi rhoncus ligula libero, vel eleifend nunc tristique vitae. Fusce et sem dui. Aenean nec scelerisque tortor. Fusce malesuada accumsan magna vel tempus. Quisque mollis felis eu dolor tristique, sit amet auctor felis gravida. Sed libero lorem, molestie sed nisl in, accumsan tempor nisi. Fusce sollicitudin massa ut lacinia mattis. Sed vel eleifend lorem. Pellentesque vitae felis pretium, pulvinar elit eu, euismod sapien.\n", + "operationId": "findPets", + "parameters": [ + { + "$ref": "parameters.json#/tagsParam" + }, + { + "$ref": "parameters.json#/limitsParam" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "type": "array", + "items": { + "$ref": "Pet.json" + } + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "../common/Error.json" + } + } + } + }, + "post": { + "description": "Creates a new pet in the store. 
Duplicates are allowed", + "operationId": "addPet", + "parameters": [ + { + "name": "pet", + "in": "body", + "description": "Pet to add to the store", + "required": true, + "schema": { + "$ref": "NewPet.json" + } + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "Pet.json" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "../common/Error.json" + } + } + } + } + }, + "/pets/{id}": { + "get": { + "description": "Returns a user based on a single ID, if the user does not have access to the pet", + "operationId": "find pet by id", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to fetch", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "Pet.json" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "../common/Error.json" + } + } + } + }, + "delete": { + "description": "deletes a single pet based on the ID supplied", + "operationId": "deletePet", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to delete", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "204": { + "description": "pet deleted" + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "../common/Error.json" + } + } + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-simple.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-simple.json new file mode 100644 index 000000000..306dc90c9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-simple.json @@ -0,0 +1,222 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore", + "description": "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "Swagger API Team" + }, + "license": { + "name": "MIT" + } + }, + "host": "petstore.swagger.io", + "basePath": "/api", + "schemes": [ + "http" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/pets": { + "get": { + "description": "Returns all pets from the system that the user has access to", + "operationId": "findPets", + "produces": [ + "application/json", + "application/xml", + "text/xml", + "text/html" + ], + "parameters": [ + { + "name": "tags", + "in": "query", + "description": "tags to filter by", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "csv" + }, + { + "name": "limit", + "in": "query", + "description": "maximum number of results to return", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + }, + "post": { + "description": "Creates a new pet in the store. 
Duplicates are allowed", + "operationId": "addPet", + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "pet", + "in": "body", + "description": "Pet to add to the store", + "required": true, + "schema": { + "$ref": "#/definitions/NewPet" + } + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + } + }, + "/pets/{id}": { + "get": { + "description": "Returns a user based on a single ID, if the user does not have access to the pet", + "operationId": "findPetById", + "produces": [ + "application/json", + "application/xml", + "text/xml", + "text/html" + ], + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to fetch", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + }, + "delete": { + "description": "deletes a single pet based on the ID supplied", + "operationId": "deletePet", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to delete", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "204": { + "description": "pet deleted" + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + } + } + }, + "definitions": { + "Pet": { + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/NewPet" + }, + { + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + } + } + } + ] + }, + "NewPet": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, + "ErrorModel": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-with-external-docs.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-with-external-docs.json new file mode 100644 index 000000000..4c5154c23 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore-with-external-docs.json @@ -0,0 +1,233 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore", + "description": "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification", + "termsOfService": "http://swagger.io/terms/", + "contact": { + "name": "Swagger API Team", + "email": "apiteam@swagger.io", + "url": "http://swagger.io" + }, + "license": { + "name": "MIT", + "url": "http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT" + } + }, + "externalDocs": { + "description": "find more info here", + "url": "https://swagger.io/about" + }, + "host": "petstore.swagger.io", + "basePath": "/api", + "schemes": [ + "http" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/pets": { + "get": { + "description": "Returns all pets from the system that the user has access to", + "operationId": "findPets", + "externalDocs": { + 
"description": "find more info here", + "url": "https://swagger.io/about" + }, + "produces": [ + "application/json", + "application/xml", + "text/xml", + "text/html" + ], + "parameters": [ + { + "name": "tags", + "in": "query", + "description": "tags to filter by", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "csv" + }, + { + "name": "limit", + "in": "query", + "description": "maximum number of results to return", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + }, + "post": { + "description": "Creates a new pet in the store. Duplicates are allowed", + "operationId": "addPet", + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "pet", + "in": "body", + "description": "Pet to add to the store", + "required": true, + "schema": { + "$ref": "#/definitions/NewPet" + } + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + } + }, + "/pets/{id}": { + "get": { + "description": "Returns a user based on a single ID, if the user does not have access to the pet", + "operationId": "findPetById", + "produces": [ + "application/json", + "application/xml", + "text/xml", + "text/html" + ], + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to fetch", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "200": { + "description": "pet response", + "schema": { + "$ref": "#/definitions/Pet" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + }, + "delete": { + "description": "deletes a single pet based on the ID supplied", + "operationId": "deletePet", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "ID of pet to delete", + "required": true, + "type": "integer", + "format": "int64" + } + ], + "responses": { + "204": { + "description": "pet deleted" + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/ErrorModel" + } + } + } + } + } + }, + "definitions": { + "Pet": { + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/NewPet" + }, + { + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + } + } + } + ] + }, + "NewPet": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, + "ErrorModel": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore.json new file mode 100644 index 000000000..415eb3f9a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/petstore.json @@ -0,0 +1,153 @@ +{ + "swagger": "2.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore", + "license": { + "name": 
"MIT" + } + }, + "host": "petstore.swagger.io", + "basePath": "/v1", + "schemes": [ + "http" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/pets": { + "get": { + "summary": "List all pets", + "operationId": "listPets", + "tags": [ + "pets" + ], + "parameters": [ + { + "name": "limit", + "in": "query", + "description": "How many items to return at one time (max 100)", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "An paged array of pets", + "headers": { + "x-next": { + "type": "string", + "description": "A link to the next page of responses" + } + }, + "schema": { + "$ref": "#/definitions/Pets" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "post": { + "summary": "Create a pet", + "operationId": "createPets", + "tags": [ + "pets" + ], + "responses": { + "201": { + "description": "Null response" + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/pets/{petId}": { + "get": { + "summary": "Info for a specific pet", + "operationId": "showPetById", + "tags": [ + "pets" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "required": true, + "description": "The id of the pet to retrieve", + "type": "string" + } + ], + "responses": { + "200": { + "description": "Expected response to a valid request", + "schema": { + "$ref": "#/definitions/Pets" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Pet": { + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, + "Pets": { + "type": "array", + "items": { + "$ref": "#/definitions/Pet" + } + }, + "Error": { + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/json/uber.json b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/uber.json new file mode 100644 index 000000000..957782897 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/json/uber.json @@ -0,0 +1,370 @@ +{ + "swagger": "2.0", + "info": { + "title": "Uber API", + "description": "Move your app forward with the Uber API", + "version": "1.0.0" + }, + "host": "api.uber.com", + "schemes": [ + "https" + ], + "basePath": "/v1", + "produces": [ + "application/json" + ], + "paths": { + "/products": { + "get": { + "summary": "Product Types", + "description": "The Products endpoint returns information about the Uber products offered at a given location. 
The response includes the display name and other details about each product, and lists the products in the proper display order.", + "parameters": [ + { + "name": "latitude", + "in": "query", + "description": "Latitude component of location.", + "required": true, + "type": "number", + "format": "double" + }, + { + "name": "longitude", + "in": "query", + "description": "Longitude component of location.", + "required": true, + "type": "number", + "format": "double" + } + ], + "tags": [ + "Products" + ], + "responses": { + "200": { + "description": "An array of products", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Product" + } + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/estimates/price": { + "get": { + "summary": "Price Estimates", + "description": "The Price Estimates endpoint returns an estimated price range for each product offered at a given location. The price estimate is provided as a formatted string with the full price range and the localized currency symbol.
The response also includes low and high estimates, and the [ISO 4217](http://en.wikipedia.org/wiki/ISO_4217) currency code for situations requiring currency conversion. When surge is active for a particular product, its surge_multiplier will be greater than 1, but the price estimate already factors in this multiplier.", + "parameters": [ + { + "name": "start_latitude", + "in": "query", + "description": "Latitude component of start location.", + "required": true, + "type": "number", + "format": "double" + }, + { + "name": "start_longitude", + "in": "query", + "description": "Longitude component of start location.", + "required": true, + "type": "number", + "format": "double" + }, + { + "name": "end_latitude", + "in": "query", + "description": "Latitude component of end location.", + "required": true, + "type": "number", + "format": "double" + }, + { + "name": "end_longitude", + "in": "query", + "description": "Longitude component of end location.", + "required": true, + "type": "number", + "format": "double" + } + ], + "tags": [ + "Estimates" + ], + "responses": { + "200": { + "description": "An array of price estimates by product", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PriceEstimate" + } + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/estimates/time": { + "get": { + "summary": "Time Estimates", + "description": "The Time Estimates endpoint returns ETAs for all products offered at a given location, with the responses expressed as integers in seconds. We recommend that this endpoint be called every minute to provide the most accurate, up-to-date ETAs.", + "parameters": [ + { + "name": "start_latitude", + "in": "query", + "description": "Latitude component of start location.", + "required": true, + "type": "number", + "format": "double" + }, + { + "name": "start_longitude", + "in": "query", + "description": "Longitude component of start location.", + "required": true, + "type": "number", + "format": "double" + }, + { + "name": "customer_uuid", + "in": "query", + "type": "string", + "format": "uuid", + "description": "Unique customer identifier to be used for experience customization." + }, + { + "name": "product_id", + "in": "query", + "type": "string", + "description": "Unique identifier representing a specific product for a given latitude & longitude." + } + ], + "tags": [ + "Estimates" + ], + "responses": { + "200": { + "description": "An array of products", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Product" + } + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/me": { + "get": { + "summary": "User Profile", + "description": "The User Profile endpoint returns information about the Uber user that has authorized with the application.", + "tags": [ + "User" + ], + "responses": { + "200": { + "description": "Profile information for a user", + "schema": { + "$ref": "#/definitions/Profile" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/history": { + "get": { + "summary": "User Activity", + "description": "The User Activity endpoint returns data about a user's lifetime activity with Uber. The response will include pickup locations and times, dropoff locations and times, the distance of past requests, and information about which products were requested.
The history array in the response will have a maximum length based on the limit parameter. The response value count may exceed limit, therefore subsequent API requests may be necessary.", + "parameters": [ + { + "name": "offset", + "in": "query", + "type": "integer", + "format": "int32", + "description": "Offset the list of returned results by this amount. Default is zero." + }, + { + "name": "limit", + "in": "query", + "type": "integer", + "format": "int32", + "description": "Number of items to retrieve. Default is 5, maximum is 100." + } + ], + "tags": [ + "User" + ], + "responses": { + "200": { + "description": "History information for the given user", + "schema": { + "$ref": "#/definitions/Activities" + } + }, + "default": { + "description": "Unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + } + }, + "definitions": { + "Product": { + "properties": { + "product_id": { + "type": "string", + "description": "Unique identifier representing a specific product for a given latitude & longitude. For example, uberX in San Francisco will have a different product_id than uberX in Los Angeles." + }, + "description": { + "type": "string", + "description": "Description of product." + }, + "display_name": { + "type": "string", + "description": "Display name of product." + }, + "capacity": { + "type": "string", + "description": "Capacity of product. For example, 4 people." + }, + "image": { + "type": "string", + "description": "Image URL representing the product." + } + } + }, + "PriceEstimate": { + "properties": { + "product_id": { + "type": "string", + "description": "Unique identifier representing a specific product for a given latitude & longitude. For example, uberX in San Francisco will have a different product_id than uberX in Los Angeles" + }, + "currency_code": { + "type": "string", + "description": "[ISO 4217](http://en.wikipedia.org/wiki/ISO_4217) currency code." + }, + "display_name": { + "type": "string", + "description": "Display name of product." + }, + "estimate": { + "type": "string", + "description": "Formatted string of estimate in local currency of the start location. Estimate could be a range, a single number (flat rate) or \"Metered\" for TAXI." + }, + "low_estimate": { + "type": "number", + "description": "Lower bound of the estimated price." + }, + "high_estimate": { + "type": "number", + "description": "Upper bound of the estimated price." + }, + "surge_multiplier": { + "type": "number", + "description": "Expected surge multiplier. Surge is active if surge_multiplier is greater than 1. Price estimate already factors in the surge multiplier." + } + } + }, + "Profile": { + "properties": { + "first_name": { + "type": "string", + "description": "First name of the Uber user." + }, + "last_name": { + "type": "string", + "description": "Last name of the Uber user." + }, + "email": { + "type": "string", + "description": "Email address of the Uber user" + }, + "picture": { + "type": "string", + "description": "Image URL of the Uber user." + }, + "promo_code": { + "type": "string", + "description": "Promo code of the Uber user." + } + } + }, + "Activity": { + "properties": { + "uuid": { + "type": "string", + "description": "Unique identifier for the activity" + } + } + }, + "Activities": { + "properties": { + "offset": { + "type": "integer", + "format": "int32", + "description": "Position in pagination." + }, + "limit": { + "type": "integer", + "format": "int32", + "description": "Number of items to retrieve (100 max)." 
+ }, + "count": { + "type": "integer", + "format": "int32", + "description": "Total number of items available." + }, + "history": { + "type": "array", + "items": { + "$ref": "#/definitions/Activity" + } + } + } + }, + "Error": { + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "fields": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/api-with-examples.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/api-with-examples.yaml new file mode 100644 index 000000000..2f4a1ccf1 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/api-with-examples.yaml @@ -0,0 +1,164 @@ +swagger: "2.0" +info: + title: Simple API overview + version: v2 +paths: + /: + get: + operationId: listVersionsv2 + summary: List API versions + produces: + - application/json + responses: + "200": + description: |- + 200 300 response + examples: + application/json: |- + { + "versions": [ + { + "status": "CURRENT", + "updated": "2011-01-21T11:33:21Z", + "id": "v2.0", + "links": [ + { + "href": "http://127.0.0.1:8774/v2/", + "rel": "self" + } + ] + }, + { + "status": "EXPERIMENTAL", + "updated": "2013-07-23T11:33:21Z", + "id": "v3.0", + "links": [ + { + "href": "http://127.0.0.1:8774/v3/", + "rel": "self" + } + ] + } + ] + } + "300": + description: |- + 200 300 response + examples: + application/json: |- + { + "versions": [ + { + "status": "CURRENT", + "updated": "2011-01-21T11:33:21Z", + "id": "v2.0", + "links": [ + { + "href": "http://127.0.0.1:8774/v2/", + "rel": "self" + } + ] + }, + { + "status": "EXPERIMENTAL", + "updated": "2013-07-23T11:33:21Z", + "id": "v3.0", + "links": [ + { + "href": "http://127.0.0.1:8774/v3/", + "rel": "self" + } + ] + } + ] + } + /v2: + get: + operationId: getVersionDetailsv2 + summary: Show API version details + produces: + - application/json + responses: + "200": + description: |- + 200 203 response + examples: + application/json: |- + { + "version": { + "status": "CURRENT", + "updated": "2011-01-21T11:33:21Z", + "media-types": [ + { + "base": "application/xml", + "type": "application/vnd.openstack.compute+xml;version=2" + }, + { + "base": "application/json", + "type": "application/vnd.openstack.compute+json;version=2" + } + ], + "id": "v2.0", + "links": [ + { + "href": "http://127.0.0.1:8774/v2/", + "rel": "self" + }, + { + "href": "http://docs.openstack.org/api/openstack-compute/2/os-compute-devguide-2.pdf", + "type": "application/pdf", + "rel": "describedby" + }, + { + "href": "http://docs.openstack.org/api/openstack-compute/2/wadl/os-compute-2.wadl", + "type": "application/vnd.sun.wadl+xml", + "rel": "describedby" + }, + { + "href": "http://docs.openstack.org/api/openstack-compute/2/wadl/os-compute-2.wadl", + "type": "application/vnd.sun.wadl+xml", + "rel": "describedby" + } + ] + } + } + "203": + description: |- + 200 203 response + examples: + application/json: |- + { + "version": { + "status": "CURRENT", + "updated": "2011-01-21T11:33:21Z", + "media-types": [ + { + "base": "application/xml", + "type": "application/vnd.openstack.compute+xml;version=2" + }, + { + "base": "application/json", + "type": "application/vnd.openstack.compute+json;version=2" + } + ], + "id": "v2.0", + "links": [ + { + "href": "http://23.253.228.211:8774/v2/", + "rel": "self" + }, + { + "href": "http://docs.openstack.org/api/openstack-compute/2/os-compute-devguide-2.pdf", + "type": "application/pdf", + "rel": "describedby" + }, 
+ { + "href": "http://docs.openstack.org/api/openstack-compute/2/wadl/os-compute-2.wadl", + "type": "application/vnd.sun.wadl+xml", + "rel": "describedby" + } + ] + } + } +consumes: +- application/json diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-expanded.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-expanded.yaml new file mode 100644 index 000000000..a5fee3ad9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-expanded.yaml @@ -0,0 +1,142 @@ +swagger: "2.0" +info: + version: 1.0.0 + title: Swagger Petstore + description: A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification + termsOfService: http://swagger.io/terms/ + contact: + name: Swagger API Team + email: foo@example.com + url: http://madskristensen.net + license: + name: MIT + url: http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT +host: petstore.swagger.io +basePath: /api +schemes: + - http +consumes: + - application/json +produces: + - application/json +paths: + /pets: + get: + description: | + Returns all pets from the system that the user has access to + Nam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam. Cras quis velit non tortor eleifend sagittis. Praesent at enim pharetra urna volutpat venenatis eget eget mauris. In eleifend fermentum facilisis. Praesent enim enim, gravida ac sodales sed, placerat id erat. Suspendisse lacus dolor, consectetur non augue vel, vehicula interdum libero. Morbi euismod sagittis libero sed lacinia. + + Sed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellentesque posuere elementum. Sed a varius odio. Morbi rhoncus ligula libero, vel eleifend nunc tristique vitae. Fusce et sem dui. Aenean nec scelerisque tortor. Fusce malesuada accumsan magna vel tempus. Quisque mollis felis eu dolor tristique, sit amet auctor felis gravida. Sed libero lorem, molestie sed nisl in, accumsan tempor nisi. Fusce sollicitudin massa ut lacinia mattis. Sed vel eleifend lorem. Pellentesque vitae felis pretium, pulvinar elit eu, euismod sapien. + operationId: findPets + parameters: + - name: tags + in: query + description: tags to filter by + required: false + type: array + collectionFormat: csv + items: + type: string + - name: limit + in: query + description: maximum number of results to return + required: false + type: integer + format: int32 + responses: + "200": + description: pet response + schema: + type: array + items: + $ref: '#/definitions/Pet' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + post: + description: Creates a new pet in the store. 
Duplicates are allowed + operationId: addPet + parameters: + - name: pet + in: body + description: Pet to add to the store + required: true + schema: + $ref: '#/definitions/NewPet' + responses: + "200": + description: pet response + schema: + $ref: '#/definitions/Pet' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + /pets/{id}: + get: + description: Returns a user based on a single ID, if the user does not have access to the pet + operationId: find pet by id + parameters: + - name: id + in: path + description: ID of pet to fetch + required: true + type: integer + format: int64 + responses: + "200": + description: pet response + schema: + $ref: '#/definitions/Pet' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + delete: + description: deletes a single pet based on the ID supplied + operationId: deletePet + parameters: + - name: id + in: path + description: ID of pet to delete + required: true + type: integer + format: int64 + responses: + "204": + description: pet deleted + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + Pet: + allOf: + - $ref: '#/definitions/NewPet' + - required: + - id + properties: + id: + type: integer + format: int64 + + NewPet: + required: + - name + properties: + name: + type: string + tag: + type: string + + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-minimal.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-minimal.yaml new file mode 100644 index 000000000..c3e06e915 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-minimal.yaml @@ -0,0 +1,47 @@ +--- + swagger: "2.0" + info: + version: "1.0.0" + title: "Swagger Petstore" + description: "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification" + termsOfService: "http://swagger.io/terms/" + contact: + name: "Swagger API Team" + license: + name: "MIT" + host: "petstore.swagger.io" + basePath: "/api" + schemes: + - "http" + consumes: + - "application/json" + produces: + - "application/json" + paths: + /pets: + get: + description: "Returns all pets from the system that the user has access to" + produces: + - "application/json" + responses: + "200": + description: "A list of pets." 
+ schema: + type: "array" + items: + $ref: "#/definitions/Pet" + definitions: + Pet: + type: "object" + required: + - "id" + - "name" + properties: + id: + type: "integer" + format: "int64" + name: + type: "string" + tag: + type: "string" + diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/common/Error.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/common/Error.yaml new file mode 100644 index 000000000..2d87b744f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/common/Error.yaml @@ -0,0 +1,10 @@ +type: object +required: + - code + - message +properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/NewPet.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/NewPet.yaml new file mode 100644 index 000000000..35e67449c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/NewPet.yaml @@ -0,0 +1,9 @@ +type: object +allOf: + - $ref: 'Pet.yaml' + - required: + - name + properties: + description: + type: integer + format: int64 diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/Pet.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/Pet.yaml new file mode 100644 index 000000000..bb113196f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/Pet.yaml @@ -0,0 +1,12 @@ +type: object +required: + - id + - name +properties: + id: + type: integer + format: int64 + name: + type: string + tag: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/parameters.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/parameters.yaml new file mode 100644 index 000000000..18736aebd --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/parameters.yaml @@ -0,0 +1,16 @@ +tagsParam: + name: tags + in: query + description: tags to filter by + required: false + type: array + collectionFormat: csv + items: + type: string +limitsParam: + name: limit + in: query + description: maximum number of results to return + required: false + type: integer + format: int32 diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/swagger.text b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/swagger.text new file mode 100644 index 000000000..8bb53c937 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/swagger.text @@ -0,0 +1,214 @@ +swagger: "2.0" +info: < + title: "Swagger Petstore" + version: "1.0.0" + description: "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification" + terms_of_service: "http://helloreverb.com/terms/" + contact: < + name: "Wordnik API Team" + url: "http://madskristensen.net" + email: "foo@example.com" + > + license: < + name: "MIT" + url: "http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT" + > +> +host: "petstore.swagger.wordnik.com" +base_path: "/api" +schemes: "http" +consumes: "application/json" +produces: "application/json" +paths: < + path: < + name: "/pets" + value: < + get: < + description: "Returns all pets from the system that the user has access to\nNam sed condimentum est. 
Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam. Cras quis velit non tortor eleifend sagittis. Praesent at enim pharetra urna volutpat venenatis eget eget mauris. In eleifend fermentum facilisis. Praesent enim enim, gravida ac sodales sed, placerat id erat. Suspendisse lacus dolor, consectetur non augue vel, vehicula interdum libero. Morbi euismod sagittis libero sed lacinia.\n\nSed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellentesque posuere elementum. Sed a varius odio. Morbi rhoncus ligula libero, vel eleifend nunc tristique vitae. Fusce et sem dui. Aenean nec scelerisque tortor. Fusce malesuada accumsan magna vel tempus. Quisque mollis felis eu dolor tristique, sit amet auctor felis gravida. Sed libero lorem, molestie sed nisl in, accumsan tempor nisi. Fusce sollicitudin massa ut lacinia mattis. Sed vel eleifend lorem. Pellentesque vitae felis pretium, pulvinar elit eu, euismod sapien.\n" + operation_id: "findPets" + parameters: < + json_reference: < + _ref: "parameters.yaml#/tagsParam" + > + > + parameters: < + json_reference: < + _ref: "parameters.yaml#/limitsParam" + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "pet response" + schema: < + schema: < + type: < + value: "array" + > + items: < + schema: < + _ref: "Pet.yaml" + > + > + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + _ref: "../common/Error.yaml" + > + > + > + > + > + > + > + post: < + description: "Creates a new pet in the store. 
Duplicates are allowed" + operation_id: "addPet" + parameters: < + parameter: < + body_parameter: < + description: "Pet to add to the store" + name: "pet" + in: "body" + required: true + schema: < + _ref: "NewPet.yaml" + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "pet response" + schema: < + schema: < + _ref: "Pet.yaml" + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + _ref: "../common/Error.yaml" + > + > + > + > + > + > + > + > + > + path: < + name: "/pets/{id}" + value: < + get: < + description: "Returns a user based on a single ID, if the user does not have access to the pet" + operation_id: "find pet by id" + parameters: < + parameter: < + non_body_parameter: < + path_parameter_sub_schema: < + required: true + in: "path" + description: "ID of pet to fetch" + name: "id" + type: "integer" + format: "int64" + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "pet response" + schema: < + schema: < + _ref: "Pet.yaml" + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + _ref: "../common/Error.yaml" + > + > + > + > + > + > + > + delete: < + description: "deletes a single pet based on the ID supplied" + operation_id: "deletePet" + parameters: < + parameter: < + non_body_parameter: < + path_parameter_sub_schema: < + required: true + in: "path" + description: "ID of pet to delete" + name: "id" + type: "integer" + format: "int64" + > + > + > + > + responses: < + response_code: < + name: "204" + value: < + response: < + description: "pet deleted" + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + _ref: "../common/Error.yaml" + > + > + > + > + > + > + > + > + > +> diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/swagger.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/swagger.yaml new file mode 100644 index 000000000..850527ebf --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-separate/spec/swagger.yaml @@ -0,0 +1,100 @@ +swagger: "2.0" +info: + version: 1.0.0 + title: Swagger Petstore + description: A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification + termsOfService: http://helloreverb.com/terms/ + contact: + name: Wordnik API Team + email: foo@example.com + url: http://madskristensen.net + license: + name: MIT + url: http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT +host: petstore.swagger.wordnik.com +basePath: /api +schemes: + - http +consumes: + - application/json +produces: + - application/json +paths: + /pets: + get: + description: | + Returns all pets from the system that the user has access to + Nam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam. Cras quis velit non tortor eleifend sagittis. Praesent at enim pharetra urna volutpat venenatis eget eget mauris. In eleifend fermentum facilisis. 
Praesent enim enim, gravida ac sodales sed, placerat id erat. Suspendisse lacus dolor, consectetur non augue vel, vehicula interdum libero. Morbi euismod sagittis libero sed lacinia. + + Sed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellentesque posuere elementum. Sed a varius odio. Morbi rhoncus ligula libero, vel eleifend nunc tristique vitae. Fusce et sem dui. Aenean nec scelerisque tortor. Fusce malesuada accumsan magna vel tempus. Quisque mollis felis eu dolor tristique, sit amet auctor felis gravida. Sed libero lorem, molestie sed nisl in, accumsan tempor nisi. Fusce sollicitudin massa ut lacinia mattis. Sed vel eleifend lorem. Pellentesque vitae felis pretium, pulvinar elit eu, euismod sapien. + operationId: findPets + parameters: + - $ref: 'parameters.yaml#/tagsParam' + - $ref: 'parameters.yaml#/limitsParam' + responses: + "200": + description: pet response + schema: + type: array + items: + $ref: 'Pet.yaml' + default: + description: unexpected error + schema: + $ref: '../common/Error.yaml' + post: + description: Creates a new pet in the store. Duplicates are allowed + operationId: addPet + parameters: + - name: pet + in: body + description: Pet to add to the store + required: true + schema: + $ref: 'NewPet.yaml' + responses: + "200": + description: pet response + schema: + $ref: 'Pet.yaml' + default: + description: unexpected error + schema: + $ref: '../common/Error.yaml' + /pets/{id}: + get: + description: Returns a user based on a single ID, if the user does not have access to the pet + operationId: find pet by id + parameters: + - name: id + in: path + description: ID of pet to fetch + required: true + type: integer + format: int64 + responses: + "200": + description: pet response + schema: + $ref: 'Pet.yaml' + default: + description: unexpected error + schema: + $ref: '../common/Error.yaml' + delete: + description: deletes a single pet based on the ID supplied + operationId: deletePet + parameters: + - name: id + in: path + description: ID of pet to delete + required: true + type: integer + format: int64 + responses: + "204": + description: pet deleted + default: + description: unexpected error + schema: + $ref: '../common/Error.yaml' diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-simple.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-simple.yaml new file mode 100644 index 000000000..d5fa07b42 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-simple.yaml @@ -0,0 +1,157 @@ +--- + swagger: "2.0" + info: + version: "1.0.0" + title: "Swagger Petstore" + description: "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification" + termsOfService: "http://swagger.io/terms/" + contact: + name: "Swagger API Team" + license: + name: "MIT" + host: "petstore.swagger.io" + basePath: "/api" + schemes: + - "http" + consumes: + - "application/json" + produces: + - "application/json" + paths: + /pets: + get: + description: "Returns all pets from the system that the user has access to" + operationId: "findPets" + produces: + - "application/json" + - "application/xml" + - "text/xml" + - "text/html" + parameters: + - + name: "tags" + in: "query" + description: "tags to filter by" + required: false + type: "array" + items: + type: "string" + collectionFormat: "csv" + - + name: 
"limit" + in: "query" + description: "maximum number of results to return" + required: false + type: "integer" + format: "int32" + responses: + "200": + description: "pet response" + schema: + type: "array" + items: + $ref: "#/definitions/Pet" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + post: + description: "Creates a new pet in the store. Duplicates are allowed" + operationId: "addPet" + produces: + - "application/json" + parameters: + - + name: "pet" + in: "body" + description: "Pet to add to the store" + required: true + schema: + $ref: "#/definitions/NewPet" + responses: + "200": + description: "pet response" + schema: + $ref: "#/definitions/Pet" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + /pets/{id}: + get: + description: "Returns a user based on a single ID, if the user does not have access to the pet" + operationId: "findPetById" + produces: + - "application/json" + - "application/xml" + - "text/xml" + - "text/html" + parameters: + - + name: "id" + in: "path" + description: "ID of pet to fetch" + required: true + type: "integer" + format: "int64" + responses: + "200": + description: "pet response" + schema: + $ref: "#/definitions/Pet" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + delete: + description: "deletes a single pet based on the ID supplied" + operationId: "deletePet" + parameters: + - + name: "id" + in: "path" + description: "ID of pet to delete" + required: true + type: "integer" + format: "int64" + responses: + "204": + description: "pet deleted" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + definitions: + Pet: + type: "object" + allOf: + - + $ref: "#/definitions/NewPet" + - + required: + - "id" + properties: + id: + type: "integer" + format: "int64" + NewPet: + type: "object" + required: + - "name" + properties: + name: + type: "string" + tag: + type: "string" + ErrorModel: + type: "object" + required: + - "code" + - "message" + properties: + code: + type: "integer" + format: "int32" + message: + type: "string" + diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-with-external-docs.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-with-external-docs.yaml new file mode 100644 index 000000000..3db47ff36 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore-with-external-docs.yaml @@ -0,0 +1,166 @@ +--- + swagger: "2.0" + info: + version: "1.0.0" + title: "Swagger Petstore" + description: "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification" + termsOfService: "http://swagger.io/terms/" + contact: + name: "Swagger API Team" + email: "apiteam@swagger.io" + url: "http://swagger.io" + license: + name: "MIT" + url: "http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT" + externalDocs: + description: "find more info here" + url: "https://swagger.io/about" + host: "petstore.swagger.io" + basePath: "/api" + schemes: + - "http" + consumes: + - "application/json" + produces: + - "application/json" + paths: + /pets: + get: + description: "Returns all pets from the system that the user has access to" + operationId: "findPets" + externalDocs: + description: "find more info here" + url: "https://swagger.io/about" + produces: + - "application/json" + - "application/xml" + - "text/xml" + - "text/html" + parameters: + - + name: "tags" + in: "query" + description: "tags to 
filter by" + required: false + type: "array" + items: + type: "string" + collectionFormat: "csv" + - + name: "limit" + in: "query" + description: "maximum number of results to return" + required: false + type: "integer" + format: "int32" + responses: + "200": + description: "pet response" + schema: + type: "array" + items: + $ref: "#/definitions/Pet" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + post: + description: "Creates a new pet in the store. Duplicates are allowed" + operationId: "addPet" + produces: + - "application/json" + parameters: + - + name: "pet" + in: "body" + description: "Pet to add to the store" + required: true + schema: + $ref: "#/definitions/NewPet" + responses: + "200": + description: "pet response" + schema: + $ref: "#/definitions/Pet" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + /pets/{id}: + get: + description: "Returns a user based on a single ID, if the user does not have access to the pet" + operationId: "findPetById" + produces: + - "application/json" + - "application/xml" + - "text/xml" + - "text/html" + parameters: + - + name: "id" + in: "path" + description: "ID of pet to fetch" + required: true + type: "integer" + format: "int64" + responses: + "200": + description: "pet response" + schema: + $ref: "#/definitions/Pet" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + delete: + description: "deletes a single pet based on the ID supplied" + operationId: "deletePet" + parameters: + - + name: "id" + in: "path" + description: "ID of pet to delete" + required: true + type: "integer" + format: "int64" + responses: + "204": + description: "pet deleted" + default: + description: "unexpected error" + schema: + $ref: "#/definitions/ErrorModel" + definitions: + Pet: + type: "object" + allOf: + - + $ref: "#/definitions/NewPet" + - + required: + - "id" + properties: + id: + type: "integer" + format: "int64" + NewPet: + type: "object" + required: + - "name" + properties: + name: + type: "string" + tag: + type: "string" + ErrorModel: + type: "object" + required: + - "code" + - "message" + properties: + code: + type: "integer" + format: "int32" + message: + type: "string" + diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore.yaml new file mode 100644 index 000000000..790948cb9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/petstore.yaml @@ -0,0 +1,101 @@ +swagger: "2.0" +info: + version: 1.0.0 + title: Swagger Petstore + license: + name: MIT +host: petstore.swagger.io +basePath: /v1 +schemes: + - http +consumes: + - application/json +produces: + - application/json +paths: + /pets: + get: + summary: List all pets + operationId: listPets + tags: + - pets + parameters: + - name: limit + in: query + description: How many items to return at one time (max 100) + required: false + type: integer + format: int32 + responses: + "200": + description: An paged array of pets + headers: + x-next: + type: string + description: A link to the next page of responses + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + post: + summary: Create a pet + operationId: createPets + tags: + - pets + responses: + "201": + description: Null response + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' + /pets/{petId}: + get: + summary: Info for 
a specific pet + operationId: showPetById + tags: + - pets + parameters: + - name: petId + in: path + required: true + description: The id of the pet to retrieve + type: string + responses: + "200": + description: Expected response to a valid request + schema: + $ref: '#/definitions/Pets' + default: + description: unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + Pet: + required: + - id + - name + properties: + id: + type: integer + format: int64 + name: + type: string + tag: + type: string + Pets: + type: array + items: + $ref: '#/definitions/Pet' + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/uber.yaml b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/uber.yaml new file mode 100644 index 000000000..12c14b08a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v2.0/yaml/uber.yaml @@ -0,0 +1,273 @@ +# this is an example of the Uber API +# as a demonstration of an API spec in YAML +swagger: "2.0" +info: + title: Uber API + description: Move your app forward with the Uber API + version: "1.0.0" +# the domain of the service +host: api.uber.com +# array of all schemes that your API supports +schemes: + - https +# will be prefixed to all paths +basePath: /v1 +securityDefinitions: + apikey: + type: apiKey + name: server_token + in: query +produces: + - application/json +paths: + /products: + get: + summary: Product Types + description: The Products endpoint returns information about the Uber products offered at a given location. The response includes the display name and other details about each product, and lists the products in the proper display order. + parameters: + - name: latitude + in: query + description: Latitude component of location. + required: true + type: number + format: double + - name: longitude + in: query + description: Longitude component of location. + required: true + type: number + format: double + security: + - apikey: [] + tags: + - Products + responses: + "200": + description: An array of products + schema: + type: array + items: + $ref: '#/definitions/Product' + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + /estimates/price: + get: + summary: Price Estimates + description: The Price Estimates endpoint returns an estimated price range for each product offered at a given location. The price estimate is provided as a formatted string with the full price range and the localized currency symbol.

The response also includes low and high estimates, and the [ISO 4217](http://en.wikipedia.org/wiki/ISO_4217) currency code for situations requiring currency conversion. When surge is active for a particular product, its surge_multiplier will be greater than 1, but the price estimate already factors in this multiplier. + parameters: + - name: start_latitude + in: query + description: Latitude component of start location. + required: true + type: number + format: double + - name: start_longitude + in: query + description: Longitude component of start location. + required: true + type: number + format: double + - name: end_latitude + in: query + description: Latitude component of end location. + required: true + type: number + format: double + - name: end_longitude + in: query + description: Longitude component of end location. + required: true + type: number + format: double + tags: + - Estimates + responses: + "200": + description: An array of price estimates by product + schema: + type: array + items: + $ref: '#/definitions/PriceEstimate' + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + /estimates/time: + get: + summary: Time Estimates + description: The Time Estimates endpoint returns ETAs for all products offered at a given location, with the responses expressed as integers in seconds. We recommend that this endpoint be called every minute to provide the most accurate, up-to-date ETAs. + parameters: + - name: start_latitude + in: query + description: Latitude component of start location. + required: true + type: number + format: double + - name: start_longitude + in: query + description: Longitude component of start location. + required: true + type: number + format: double + - name: customer_uuid + in: query + type: string + format: uuid + description: Unique customer identifier to be used for experience customization. + - name: product_id + in: query + type: string + description: Unique identifier representing a specific product for a given latitude & longitude. + tags: + - Estimates + responses: + "200": + description: An array of products + schema: + type: array + items: + $ref: '#/definitions/Product' + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + /me: + get: + summary: User Profile + description: The User Profile endpoint returns information about the Uber user that has authorized with the application. + tags: + - User + responses: + "200": + description: Profile information for a user + schema: + $ref: '#/definitions/Profile' + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' + /history: + get: + summary: User Activity + description: The User Activity endpoint returns data about a user's lifetime activity with Uber. The response will include pickup locations and times, dropoff locations and times, the distance of past requests, and information about which products were requested.

The history array in the response will have a maximum length based on the limit parameter. The response value count may exceed limit, therefore subsequent API requests may be necessary. + parameters: + - name: offset + in: query + type: integer + format: int32 + description: Offset the list of returned results by this amount. Default is zero. + - name: limit + in: query + type: integer + format: int32 + description: Number of items to retrieve. Default is 5, maximum is 100. + tags: + - User + responses: + "200": + description: History information for the given user + schema: + $ref: '#/definitions/Activities' + default: + description: Unexpected error + schema: + $ref: '#/definitions/Error' +definitions: + Product: + properties: + product_id: + type: string + description: Unique identifier representing a specific product for a given latitude & longitude. For example, uberX in San Francisco will have a different product_id than uberX in Los Angeles. + description: + type: string + description: Description of product. + display_name: + type: string + description: Display name of product. + capacity: + type: integer + description: Capacity of product. For example, 4 people. + image: + type: string + description: Image URL representing the product. + ProductList: + properties: + products: + description: Contains the list of products + type: array + items: + $ref: "#/definitions/Product" + PriceEstimate: + properties: + product_id: + type: string + description: Unique identifier representing a specific product for a given latitude & longitude. For example, uberX in San Francisco will have a different product_id than uberX in Los Angeles + currency_code: + type: string + description: "[ISO 4217](http://en.wikipedia.org/wiki/ISO_4217) currency code." + display_name: + type: string + description: Display name of product. + estimate: + type: string + description: Formatted string of estimate in local currency of the start location. Estimate could be a range, a single number (flat rate) or "Metered" for TAXI. + low_estimate: + type: number + description: Lower bound of the estimated price. + high_estimate: + type: number + description: Upper bound of the estimated price. + surge_multiplier: + type: number + description: Expected surge multiplier. Surge is active if surge_multiplier is greater than 1. Price estimate already factors in the surge multiplier. + Profile: + properties: + first_name: + type: string + description: First name of the Uber user. + last_name: + type: string + description: Last name of the Uber user. + email: + type: string + description: Email address of the Uber user + picture: + type: string + description: Image URL of the Uber user. + promo_code: + type: string + description: Promo code of the Uber user. + Activity: + properties: + uuid: + type: string + description: Unique identifier for the activity + Activities: + properties: + offset: + type: integer + format: int32 + description: Position in pagination. + limit: + type: integer + format: int32 + description: Number of items to retrieve (100 max). + count: + type: integer + format: int32 + description: Total number of items available. 
+ history: + type: array + items: + $ref: '#/definitions/Activity' + Error: + properties: + code: + type: integer + format: int32 + message: + type: string + fields: + type: string diff --git a/vendor/github.com/googleapis/gnostic/examples/v3.0/json/petstore.json b/vendor/github.com/googleapis/gnostic/examples/v3.0/json/petstore.json new file mode 100644 index 000000000..af8a886d4 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v3.0/json/petstore.json @@ -0,0 +1,176 @@ +{ + "openapi": "3.0", + "info": { + "version": "1.0.0", + "title": "OpenAPI Petstore", + "license": { + "name": "MIT" + } + }, + "servers": [ + { + "url": "https://petstore.openapis.org/v1", + "description": "Development server" + } + ], + "paths": { + "/pets": { + "get": { + "summary": "List all pets", + "operationId": "listPets", + "tags": [ + "pets" + ], + "parameters": [ + { + "name": "limit", + "in": "query", + "description": "How many items to return at one time (max 100)", + "required": false, + "schema": { + "type": "integer", + "format": "int32" + } + } + ], + "responses": { + "200": { + "description": "An paged array of pets", + "headers": { + "x-next": { + "schema": { + "type": "string" + }, + "description": "A link to the next page of responses" + } + }, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Pets" + } + } + } + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + } + }, + "post": { + "summary": "Create a pet", + "operationId": "createPets", + "tags": [ + "pets" + ], + "responses": { + "201": { + "description": "Null response" + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + } + } + }, + "/pets/{petId}": { + "get": { + "summary": "Info for a specific pet", + "operationId": "showPetById", + "tags": [ + "pets" + ], + "parameters": [ + { + "name": "petId", + "in": "path", + "required": true, + "description": "The id of the pet to retrieve", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Expected response to a valid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Pets" + } + } + } + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Error" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "Pet": { + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string" + } + } + }, + "Pets": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Pet" + } + }, + "Error": { + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/examples/v3.0/yaml/petstore.yaml b/vendor/github.com/googleapis/gnostic/examples/v3.0/yaml/petstore.yaml new file mode 100644 index 000000000..6951bb2ff --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/examples/v3.0/yaml/petstore.yaml @@ -0,0 +1,110 @@ +openapi: "3.0" +info: + version: 1.0.0 + title: OpenAPI Petstore + license: + name: MIT +servers: +- url: https://petstore.openapis.org/v1 + description: Development 
server +paths: + /pets: + get: + summary: List all pets + operationId: listPets + tags: + - pets + parameters: + - name: limit + in: query + description: How many items to return at one time (max 100) + required: false + schema: + type: integer + format: int32 + responses: + "200": + description: An paged array of pets + headers: + x-next: + schema: + type: string + description: A link to the next page of responses + content: + application/json: + schema: + $ref: '#/components/schemas/Pets' + default: + description: unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + post: + summary: Create a pet + operationId: createPets + tags: + - pets + responses: + "201": + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + /pets/{petId}: + get: + summary: Info for a specific pet + operationId: showPetById + tags: + - pets + parameters: + - name: petId + in: path + required: true + description: The id of the pet to retrieve + schema: + type: string + responses: + "200": + description: Expected response to a valid request + content: + application/json: + schema: + $ref: '#/components/schemas/Pets' + default: + description: unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' +components: + schemas: + Pet: + required: + - id + - name + properties: + id: + type: integer + format: int64 + name: + type: string + tag: + type: string + Pets: + type: array + items: + $ref: '#/components/schemas/Pet' + Error: + required: + - code + - message + properties: + code: + type: integer + format: int32 + message: + type: string diff --git a/vendor/github.com/googleapis/gnostic/extensions/COMPILE-EXTENSION.sh b/vendor/github.com/googleapis/gnostic/extensions/COMPILE-EXTENSION.sh new file mode 100755 index 000000000..68d02a02a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/COMPILE-EXTENSION.sh @@ -0,0 +1,5 @@ +go get github.com/golang/protobuf/protoc-gen-go + +protoc \ +--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. *.proto + diff --git a/vendor/github.com/googleapis/gnostic/extensions/README.md b/vendor/github.com/googleapis/gnostic/extensions/README.md new file mode 100644 index 000000000..ff1c2eb1e --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/README.md @@ -0,0 +1,5 @@ +# Extensions + +This directory contains support code for building Gnostic extensions and associated examples. + +Extensions are used to compile vendor or specification extensions into protocol buffer structures. diff --git a/vendor/github.com/googleapis/gnostic/extensions/extension.pb.go b/vendor/github.com/googleapis/gnostic/extensions/extension.pb.go new file mode 100644 index 000000000..7c6b91496 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/extension.pb.go @@ -0,0 +1,219 @@ +// Code generated by protoc-gen-go. +// source: extension.proto +// DO NOT EDIT! + +/* +Package openapiextension_v1 is a generated protocol buffer package. + +It is generated from these files: + extension.proto + +It has these top-level messages: + Version + ExtensionHandlerRequest + ExtensionHandlerResponse + Wrapper +*/ +package openapiextension_v1 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The version number of OpenAPI compiler. +type Version struct { + Major int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"` + Minor int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"` + Patch int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"` + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + Suffix string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Version) GetMajor() int32 { + if m != nil { + return m.Major + } + return 0 +} + +func (m *Version) GetMinor() int32 { + if m != nil { + return m.Minor + } + return 0 +} + +func (m *Version) GetPatch() int32 { + if m != nil { + return m.Patch + } + return 0 +} + +func (m *Version) GetSuffix() string { + if m != nil { + return m.Suffix + } + return "" +} + +// An encoded Request is written to the ExtensionHandler's stdin. +type ExtensionHandlerRequest struct { + // The OpenAPI descriptions that were explicitly listed on the command line. + // The specifications will appear in the order they are specified to openapic. + Wrapper *Wrapper `protobuf:"bytes,1,opt,name=wrapper" json:"wrapper,omitempty"` + // The version number of openapi compiler. + CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` +} + +func (m *ExtensionHandlerRequest) Reset() { *m = ExtensionHandlerRequest{} } +func (m *ExtensionHandlerRequest) String() string { return proto.CompactTextString(m) } +func (*ExtensionHandlerRequest) ProtoMessage() {} +func (*ExtensionHandlerRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *ExtensionHandlerRequest) GetWrapper() *Wrapper { + if m != nil { + return m.Wrapper + } + return nil +} + +func (m *ExtensionHandlerRequest) GetCompilerVersion() *Version { + if m != nil { + return m.CompilerVersion + } + return nil +} + +// The extensions writes an encoded ExtensionHandlerResponse to stdout. +type ExtensionHandlerResponse struct { + // true if the extension is handled by the extension handler; false otherwise + Handled bool `protobuf:"varint,1,opt,name=handled" json:"handled,omitempty"` + // Error message. If non-empty, the extension handling failed. + // The extension handler process should exit with status code zero + // even if it reports an error in this way. + // + // This should be used to indicate errors which prevent the extension from + // operating as intended. Errors which indicate a problem in gnostic + // itself -- such as the input Document being unparseable -- should be + // reported by writing a message to stderr and exiting with a non-zero + // status code. 
+ Error []string `protobuf:"bytes,2,rep,name=error" json:"error,omitempty"` + // text output + Value *google_protobuf.Any `protobuf:"bytes,3,opt,name=value" json:"value,omitempty"` +} + +func (m *ExtensionHandlerResponse) Reset() { *m = ExtensionHandlerResponse{} } +func (m *ExtensionHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*ExtensionHandlerResponse) ProtoMessage() {} +func (*ExtensionHandlerResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *ExtensionHandlerResponse) GetHandled() bool { + if m != nil { + return m.Handled + } + return false +} + +func (m *ExtensionHandlerResponse) GetError() []string { + if m != nil { + return m.Error + } + return nil +} + +func (m *ExtensionHandlerResponse) GetValue() *google_protobuf.Any { + if m != nil { + return m.Value + } + return nil +} + +type Wrapper struct { + // version of the OpenAPI specification in which this extension was written. + Version string `protobuf:"bytes,1,opt,name=version" json:"version,omitempty"` + // Name of the extension + ExtensionName string `protobuf:"bytes,2,opt,name=extension_name,json=extensionName" json:"extension_name,omitempty"` + // Must be a valid yaml for the proto + Yaml string `protobuf:"bytes,3,opt,name=yaml" json:"yaml,omitempty"` +} + +func (m *Wrapper) Reset() { *m = Wrapper{} } +func (m *Wrapper) String() string { return proto.CompactTextString(m) } +func (*Wrapper) ProtoMessage() {} +func (*Wrapper) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *Wrapper) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Wrapper) GetExtensionName() string { + if m != nil { + return m.ExtensionName + } + return "" +} + +func (m *Wrapper) GetYaml() string { + if m != nil { + return m.Yaml + } + return "" +} + +func init() { + proto.RegisterType((*Version)(nil), "openapiextension.v1.Version") + proto.RegisterType((*ExtensionHandlerRequest)(nil), "openapiextension.v1.ExtensionHandlerRequest") + proto.RegisterType((*ExtensionHandlerResponse)(nil), "openapiextension.v1.ExtensionHandlerResponse") + proto.RegisterType((*Wrapper)(nil), "openapiextension.v1.Wrapper") +} + +func init() { proto.RegisterFile("extension.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 355 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x4d, 0x4b, 0xf3, 0x40, + 0x1c, 0xc4, 0x49, 0xdf, 0xf2, 0x64, 0x1f, 0xb4, 0xb2, 0x16, 0x8d, 0xe2, 0xa1, 0x04, 0x84, 0x22, + 0xb8, 0xa5, 0x0a, 0xde, 0x5b, 0x28, 0xea, 0xc5, 0x96, 0x3d, 0xd4, 0x9b, 0x65, 0x9b, 0xfe, 0xdb, + 0x46, 0x92, 0xdd, 0x75, 0xf3, 0x62, 0xfb, 0x55, 0x3c, 0xfa, 0x49, 0x25, 0xbb, 0xd9, 0x7a, 0x50, + 0x6f, 0x99, 0x1f, 0x93, 0xfc, 0x67, 0x26, 0xa8, 0x0d, 0xdb, 0x0c, 0x78, 0x1a, 0x09, 0x4e, 0xa4, + 0x12, 0x99, 0xc0, 0xc7, 0x42, 0x02, 0x67, 0x32, 0xfa, 0xe6, 0xc5, 0xe0, 0xfc, 0x6c, 0x2d, 0xc4, + 0x3a, 0x86, 0xbe, 0xb6, 0x2c, 0xf2, 0x55, 0x9f, 0xf1, 0x9d, 0xf1, 0x07, 0x21, 0x72, 0x67, 0xa0, + 0x4a, 0x23, 0xee, 0xa0, 0x66, 0xc2, 0x5e, 0x85, 0xf2, 0x9d, 0xae, 0xd3, 0x6b, 0x52, 0x23, 0x34, + 0x8d, 0xb8, 0x50, 0x7e, 0xad, 0xa2, 0xa5, 0x28, 0xa9, 0x64, 0x59, 0xb8, 0xf1, 0xeb, 0x86, 0x6a, + 0x81, 0x4f, 0x50, 0x2b, 0xcd, 0x57, 0xab, 0x68, 0xeb, 0x37, 0xba, 0x4e, 0xcf, 0xa3, 0x95, 0x0a, + 0x3e, 0x1c, 0x74, 0x3a, 0xb6, 0x81, 0x1e, 0x18, 0x5f, 0xc6, 0xa0, 0x28, 0xbc, 0xe5, 0x90, 0x66, + 0xf8, 0x0e, 0xb9, 0xef, 0x8a, 0x49, 0x09, 0xe6, 0xee, 0xff, 0x9b, 0x0b, 0xf2, 0x4b, 0x05, 0xf2, + 0x6c, 0x3c, 0xd4, 
0x9a, 0xf1, 0x3d, 0x3a, 0x0a, 0x45, 0x22, 0xa3, 0x18, 0xd4, 0xbc, 0x30, 0x0d, + 0x74, 0x98, 0xbf, 0x3e, 0x50, 0xb5, 0xa4, 0x6d, 0xfb, 0x56, 0x05, 0x82, 0x02, 0xf9, 0x3f, 0xb3, + 0xa5, 0x52, 0xf0, 0x14, 0xb0, 0x8f, 0xdc, 0x8d, 0x46, 0x4b, 0x1d, 0xee, 0x1f, 0xb5, 0xb2, 0x1c, + 0x00, 0x94, 0xd2, 0xb3, 0xd4, 0x7b, 0x1e, 0x35, 0x02, 0x5f, 0xa1, 0x66, 0xc1, 0xe2, 0x1c, 0xaa, + 0x24, 0x1d, 0x62, 0x86, 0x27, 0x76, 0x78, 0x32, 0xe4, 0x3b, 0x6a, 0x2c, 0xc1, 0x0b, 0x72, 0xab, + 0x52, 0xe5, 0x19, 0x5b, 0xc1, 0xd1, 0xc3, 0x59, 0x89, 0x2f, 0xd1, 0xe1, 0xbe, 0xc5, 0x9c, 0xb3, + 0x04, 0xf4, 0x6f, 0xf0, 0xe8, 0xc1, 0x9e, 0x3e, 0xb1, 0x04, 0x30, 0x46, 0x8d, 0x1d, 0x4b, 0x62, + 0x7d, 0xd6, 0xa3, 0xfa, 0x79, 0x74, 0x8d, 0xda, 0x42, 0xad, 0xed, 0x16, 0x21, 0x29, 0x06, 0x23, + 0x3c, 0x91, 0xc0, 0x87, 0xd3, 0xc7, 0x7d, 0xdf, 0xd9, 0x60, 0xea, 0x7c, 0xd6, 0xea, 0x93, 0xe1, + 0x78, 0xd1, 0xd2, 0x19, 0x6f, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xfc, 0x56, 0x40, 0x4d, 0x52, + 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/googleapis/gnostic/extensions/extension.proto b/vendor/github.com/googleapis/gnostic/extensions/extension.proto new file mode 100644 index 000000000..806760a13 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/extension.proto @@ -0,0 +1,93 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +import "google/protobuf/any.proto"; +package openapiextension.v1; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIExtensionV1"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapic.v1"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +// +option objc_class_prefix = "OAE"; // "OpenAPI Extension" + +// The version number of OpenAPI compiler. +message Version { + int32 major = 1; + int32 minor = 2; + int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + string suffix = 4; +} + +// An encoded Request is written to the ExtensionHandler's stdin. 
+message ExtensionHandlerRequest { + + // The OpenAPI descriptions that were explicitly listed on the command line. + // The specifications will appear in the order they are specified to openapic. + Wrapper wrapper = 1; + + // The version number of openapi compiler. + Version compiler_version = 3; +} + +// The extensions writes an encoded ExtensionHandlerResponse to stdout. +message ExtensionHandlerResponse { + + // true if the extension is handled by the extension handler; false otherwise + bool handled = 1; + + // Error message. If non-empty, the extension handling failed. + // The extension handler process should exit with status code zero + // even if it reports an error in this way. + // + // This should be used to indicate errors which prevent the extension from + // operating as intended. Errors which indicate a problem in gnostic + // itself -- such as the input Document being unparseable -- should be + // reported by writing a message to stderr and exiting with a non-zero + // status code. + repeated string error = 2; + + // text output + google.protobuf.Any value = 3; +} + +message Wrapper { + // version of the OpenAPI specification in which this extension was written. + string version = 1; + + // Name of the extension + string extension_name = 2; + + // Must be a valid yaml for the proto + string yaml = 3; +} diff --git a/vendor/github.com/googleapis/gnostic/extensions/extensions.go b/vendor/github.com/googleapis/gnostic/extensions/extensions.go new file mode 100644 index 000000000..94a8e62a7 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/extensions.go @@ -0,0 +1,82 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapiextension_v1 + +import ( + "fmt" + "io/ioutil" + "os" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" +) + +type documentHandler func(version string, extensionName string, document string) +type extensionHandler func(name string, yamlInput string) (bool, proto.Message, error) + +func forInputYamlFromOpenapic(handler documentHandler) { + data, err := ioutil.ReadAll(os.Stdin) + if err != nil { + fmt.Println("File error:", err.Error()) + os.Exit(1) + } + if len(data) == 0 { + fmt.Println("No input data.") + os.Exit(1) + } + request := &ExtensionHandlerRequest{} + err = proto.Unmarshal(data, request) + if err != nil { + fmt.Println("Input error:", err.Error()) + os.Exit(1) + } + handler(request.Wrapper.Version, request.Wrapper.ExtensionName, request.Wrapper.Yaml) +} + +// ProcessExtension calles the handler for a specified extension. 
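+//
+// An encoded ExtensionHandlerRequest is read from stdin (see
+// forInputYamlFromOpenapic above), the handler is invoked for the named
+// extension, and an encoded ExtensionHandlerResponse is written to stdout.
+//
+// A minimal handler binary might look like the following sketch, with the
+// proto and openapiextension_v1 packages imported; "x-sample" and
+// parseSample are hypothetical stand-ins for a real extension name and a
+// YAML parser that returns a proto.Message:
+//
+//	func main() {
+//		openapiextension_v1.ProcessExtension(func(name string, yamlInput string) (bool, proto.Message, error) {
+//			if name != "x-sample" {
+//				return false, nil, nil // defer to other handlers
+//			}
+//			msg, err := parseSample(yamlInput) // hypothetical parser
+//			return true, msg, err
+//		})
+//	}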
+func ProcessExtension(handleExtension extensionHandler) { + response := &ExtensionHandlerResponse{} + forInputYamlFromOpenapic( + func(version string, extensionName string, yamlInput string) { + var newObject proto.Message + var err error + + handled, newObject, err := handleExtension(extensionName, yamlInput) + if !handled { + responseBytes, _ := proto.Marshal(response) + os.Stdout.Write(responseBytes) + os.Exit(0) + } + + // If we reach here, then the extension is handled + response.Handled = true + if err != nil { + response.Error = append(response.Error, err.Error()) + responseBytes, _ := proto.Marshal(response) + os.Stdout.Write(responseBytes) + os.Exit(0) + } + response.Value, err = ptypes.MarshalAny(newObject) + if err != nil { + response.Error = append(response.Error, err.Error()) + responseBytes, _ := proto.Marshal(response) + os.Stdout.Write(responseBytes) + os.Exit(0) + } + }) + + responseBytes, _ := proto.Marshal(response) + os.Stdout.Write(responseBytes) +} diff --git a/vendor/github.com/googleapis/gnostic/extensions/sample/Makefile b/vendor/github.com/googleapis/gnostic/extensions/sample/Makefile new file mode 100755 index 000000000..bf8b1cd82 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/sample/Makefile @@ -0,0 +1,10 @@ + +build: + go get github.com/golang/protobuf/protoc-gen-go + cd ..; ./COMPILE-EXTENSION.sh + generate-gnostic --extension x-sampleone.json --out_dir=$(GOPATH)/src/github.com/googleapis/gnostic/extensions/sample/generated + cd generated/gnostic-x-sampleone/proto; protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. *.proto + cd generated/gnostic-x-sampleone; go get; go install + generate-gnostic --extension x-sampletwo.json --out_dir=$(GOPATH)/src/github.com/googleapis/gnostic/extensions/sample/generated + cd generated/gnostic-x-sampletwo/proto; protoc --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. 
*.proto + cd generated/gnostic-x-sampletwo; go get; go install diff --git a/vendor/github.com/googleapis/gnostic/extensions/sample/x-sampleone.json b/vendor/github.com/googleapis/gnostic/extensions/sample/x-sampleone.json new file mode 100644 index 000000000..e07ee149a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/sample/x-sampleone.json @@ -0,0 +1,55 @@ + { + "definitions": { + "Book": { + "type": "object", + "id": "x-sampleone-book", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "integer" + } + } + }, + "Shelf": { + "type": "object", + "id": "x-sampleone-shelf", + "required": [ + "foo1", + "bar" + ], + "properties": { + "foo1": { + "type": "integer", + "format": "int32" + }, + "bar": { + "type": "integer" + } + } + }, + "PrimitiveString": { + "type": "string", + "id": "x-sampleone-mysimplestring" + }, + "PrimitiveNumber": { + "type": "number", + "id": "x-sampleone-mysimplenumber" + }, + "PrimitiveBoolean": { + "type": "boolean", + "id": "x-sampleone-mysimpleboolean" + }, + "PrimitiveInt64": { + "type": "string", + "id": "x-sampleone-mysimpleint64" + } + + } + } diff --git a/vendor/github.com/googleapis/gnostic/extensions/sample/x-sampletwo.json b/vendor/github.com/googleapis/gnostic/extensions/sample/x-sampletwo.json new file mode 100644 index 000000000..db707380b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/extensions/sample/x-sampletwo.json @@ -0,0 +1,38 @@ + { + "definitions": { + "Book": { + "type": "object", + "id": "x-sampletwo-book", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "integer" + } + } + }, + "Shelf": { + "type": "object", + "id": "x-sampletwo-shelf", + "required": [ + "foo1", + "bar" + ], + "properties": { + "foo1": { + "type": "integer", + "format": "int32" + }, + "bar": { + "type": "integer" + } + } + } + } + } diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/README.md b/vendor/github.com/googleapis/gnostic/generate-gnostic/README.md new file mode 100644 index 000000000..5146b7e0d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/README.md @@ -0,0 +1,13 @@ +# generate-gnostic + +## The gnostic compiler generator + +This directory contains code that generates a protocol buffer +representation and supporting code for a JSON schema. + +It is currently used to build models of OpenAPI specifications +and extensions which are described as "vendor extensions" in +OpenAPI 2.0 and "specification extensions" in OpenAPI 3.0. + +For usage information, run the `generate-gnostic` binary with no +options. diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/domain.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/domain.go new file mode 100644 index 000000000..8ddce2d56 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/domain.go @@ -0,0 +1,624 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "errors" + "fmt" + "log" + "sort" + "strings" + + "github.com/googleapis/gnostic/jsonschema" +) + +// Domain models a collection of types that is defined by a schema. +type Domain struct { + TypeModels map[string]*TypeModel // models of the types in the domain + Prefix string // type prefix to use + Schema *jsonschema.Schema // top-level schema + TypeNameOverrides map[string]string // a configured mapping from patterns to type names + PropertyNameOverrides map[string]string // a configured mapping from patterns to property names + ObjectTypeRequests map[string]*TypeRequest // anonymous types implied by type instantiation + MapTypeRequests map[string]string // "NamedObject" types that will be used to implement ordered maps + Version string // OpenAPI Version ("v2" or "v3") +} + +// NewDomain creates a domain representation. +func NewDomain(schema *jsonschema.Schema, version string) *Domain { + cc := &Domain{} + cc.TypeModels = make(map[string]*TypeModel, 0) + cc.TypeNameOverrides = make(map[string]string, 0) + cc.PropertyNameOverrides = make(map[string]string, 0) + cc.ObjectTypeRequests = make(map[string]*TypeRequest, 0) + cc.MapTypeRequests = make(map[string]string, 0) + cc.Schema = schema + cc.Version = version + return cc +} + +// TypeNameForStub returns a capitalized name to use for a generated type. +func (domain *Domain) TypeNameForStub(stub string) string { + + + return domain.Prefix + strings.ToUpper(stub[0:1]) + stub[1:len(stub)] +} + +// typeNameForReference returns a capitalized name to use for a generated type based on a JSON reference +func (domain *Domain) typeNameForReference(reference string) string { + parts := strings.Split(reference, "/") + first := parts[0] + last := parts[len(parts)-1] + if first == "#" { + return domain.TypeNameForStub(last) + } + return "Schema" +} + +// propertyNameForReference returns a property name to use for a JSON reference +func (domain *Domain) propertyNameForReference(reference string) *string { + parts := strings.Split(reference, "/") + first := parts[0] + last := parts[len(parts)-1] + if first == "#" { + return &last + } + return nil +} + +// arrayItemTypeForSchema determines the item type for arrays defined by a schema +func (domain *Domain) arrayItemTypeForSchema(propertyName string, schema *jsonschema.Schema) string { + // default + itemTypeName := "Any" + + if schema.Items != nil { + + if schema.Items.SchemaArray != nil { + + if len(*(schema.Items.SchemaArray)) > 0 { + ref := (*schema.Items.SchemaArray)[0].Ref + if ref != nil { + itemTypeName = domain.typeNameForReference(*ref) + } else { + types := (*schema.Items.SchemaArray)[0].Type + if types == nil { + // do nothing + } else if (types.StringArray != nil) && len(*(types.StringArray)) == 1 { + itemTypeName = (*types.StringArray)[0] + } else if (types.StringArray != nil) && len(*(types.StringArray)) > 1 { + itemTypeName = fmt.Sprintf("%+v", types.StringArray) + } else if types.String != nil { + itemTypeName = *(types.String) + } else { + itemTypeName = "UNKNOWN" + } + } + } + + } else if schema.Items.Schema != nil { + types := schema.Items.Schema.Type + + if schema.Items.Schema.Ref != nil { + itemTypeName = domain.typeNameForReference(*schema.Items.Schema.Ref) + } else if schema.Items.Schema.OneOf != nil { + // this type is implied by the "oneOf" + itemTypeName = domain.TypeNameForStub(propertyName + "Item") + domain.ObjectTypeRequests[itemTypeName] = 
+ NewTypeRequest(itemTypeName, propertyName, schema.Items.Schema) + } else if types == nil { + // do nothing + } else if (types.StringArray != nil) && len(*(types.StringArray)) == 1 { + itemTypeName = (*types.StringArray)[0] + } else if (types.StringArray != nil) && len(*(types.StringArray)) > 1 { + itemTypeName = fmt.Sprintf("%+v", types.StringArray) + } else if types.String != nil { + itemTypeName = *(types.String) + } else { + itemTypeName = "UNKNOWN" + } + } + + } + return itemTypeName +} + +func (domain *Domain) buildTypeProperties(typeModel *TypeModel, schema *jsonschema.Schema) { + if schema.Properties != nil { + for _, pair := range *(schema.Properties) { + propertyName := pair.Name + propertySchema := pair.Value + if propertySchema.Ref != nil { + // the property schema is a reference, so we will add a property with the type of the referenced schema + propertyTypeName := domain.typeNameForReference(*(propertySchema.Ref)) + typeProperty := NewTypeProperty() + typeProperty.Name = propertyName + typeProperty.Type = propertyTypeName + typeModel.addProperty(typeProperty) + } else if propertySchema.Type != nil { + // the property schema specifies a type, so add a property with the specified type + if propertySchema.TypeIs("string") { + typeProperty := NewTypePropertyWithNameAndType(propertyName, "string") + if propertySchema.Description != nil { + typeProperty.Description = *propertySchema.Description + } + if propertySchema.Enumeration != nil { + allowedValues := make([]string, 0) + for _, enumValue := range *propertySchema.Enumeration { + if enumValue.String != nil { + allowedValues = append(allowedValues, *enumValue.String) + } + } + typeProperty.StringEnumValues = allowedValues + } + typeModel.addProperty(typeProperty) + } else if propertySchema.TypeIs("boolean") { + typeProperty := NewTypePropertyWithNameAndType(propertyName, "bool") + if propertySchema.Description != nil { + typeProperty.Description = *propertySchema.Description + } + typeModel.addProperty(typeProperty) + } else if propertySchema.TypeIs("number") { + typeProperty := NewTypePropertyWithNameAndType(propertyName, "float") + if propertySchema.Description != nil { + typeProperty.Description = *propertySchema.Description + } + typeModel.addProperty(typeProperty) + } else if propertySchema.TypeIs("integer") { + typeProperty := NewTypePropertyWithNameAndType(propertyName, "int") + if propertySchema.Description != nil { + typeProperty.Description = *propertySchema.Description + } + typeModel.addProperty(typeProperty) + } else if propertySchema.TypeIs("object") { + // the property has an "anonymous" object schema, so define a new type for it and request its creation + anonymousObjectTypeName := domain.TypeNameForStub(propertyName) + domain.ObjectTypeRequests[anonymousObjectTypeName] = + NewTypeRequest(anonymousObjectTypeName, propertyName, propertySchema) + // add a property with the type of the requested type + typeProperty := NewTypePropertyWithNameAndType(propertyName, anonymousObjectTypeName) + if propertySchema.Description != nil { + typeProperty.Description = *propertySchema.Description + } + typeModel.addProperty(typeProperty) + } else if propertySchema.TypeIs("array") { + // the property has an array type, so define it as a repeated property of the specified type + propertyTypeName := domain.arrayItemTypeForSchema(propertyName, propertySchema) + typeProperty := NewTypePropertyWithNameAndType(propertyName, propertyTypeName) + typeProperty.Repeated = true + if propertySchema.Description != nil { + 
typeProperty.Description = *propertySchema.Description + } + if typeProperty.Type == "string" { + itemSchema := propertySchema.Items.Schema + if itemSchema != nil { + if itemSchema.Enumeration != nil { + allowedValues := make([]string, 0) + for _, enumValue := range *itemSchema.Enumeration { + if enumValue.String != nil { + allowedValues = append(allowedValues, *enumValue.String) + } + } + typeProperty.StringEnumValues = allowedValues + } + } + } + typeModel.addProperty(typeProperty) + } else { + log.Printf("ignoring %+v, which has an unsupported property type '%s'", propertyName, propertySchema.Type.Description()) + } + } else if propertySchema.IsEmpty() { + // an empty schema can contain anything, so add an accessor for a generic object + typeName := "Any" + typeProperty := NewTypePropertyWithNameAndType(propertyName, typeName) + typeModel.addProperty(typeProperty) + } else if propertySchema.OneOf != nil { + anonymousObjectTypeName := domain.TypeNameForStub(propertyName + "Item") + domain.ObjectTypeRequests[anonymousObjectTypeName] = + NewTypeRequest(anonymousObjectTypeName, propertyName, propertySchema) + typeProperty := NewTypePropertyWithNameAndType(propertyName, anonymousObjectTypeName) + typeModel.addProperty(typeProperty) + } else if propertySchema.AnyOf != nil { + anonymousObjectTypeName := domain.TypeNameForStub(propertyName + "Item") + domain.ObjectTypeRequests[anonymousObjectTypeName] = + NewTypeRequest(anonymousObjectTypeName, propertyName, propertySchema) + typeProperty := NewTypePropertyWithNameAndType(propertyName, anonymousObjectTypeName) + typeModel.addProperty(typeProperty) + } else { + log.Printf("ignoring %s.%s, which has an unrecognized schema:\n%+v", typeModel.Name, propertyName, propertySchema.String()) + } + } + } +} + +func (domain *Domain) buildTypeRequirements(typeModel *TypeModel, schema *jsonschema.Schema) { + if schema.Required != nil { + typeModel.Required = (*schema.Required) + } +} + +func (domain *Domain) buildPatternPropertyAccessors(typeModel *TypeModel, schema *jsonschema.Schema) { + if schema.PatternProperties != nil { + typeModel.OpenPatterns = make([]string, 0) + for _, pair := range *(schema.PatternProperties) { + propertyPattern := pair.Name + propertySchema := pair.Value + typeModel.OpenPatterns = append(typeModel.OpenPatterns, propertyPattern) + if propertySchema.Ref != nil { + typeName := domain.typeNameForReference(*propertySchema.Ref) + if _, ok := domain.TypeNameOverrides[typeName]; ok { + typeName = domain.TypeNameOverrides[typeName] + } + propertyName := domain.typeNameForReference(*propertySchema.Ref) + if _, ok := domain.PropertyNameOverrides[propertyName]; ok { + propertyName = domain.PropertyNameOverrides[propertyName] + } + propertyTypeName := fmt.Sprintf("Named%s", typeName) + property := NewTypePropertyWithNameTypeAndPattern(propertyName, propertyTypeName, propertyPattern) + property.Implicit = true + property.MapType = typeName + property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) + } + } + } +} + +func (domain *Domain) buildAdditionalPropertyAccessors(typeModel *TypeModel, schema *jsonschema.Schema) { + if schema.AdditionalProperties != nil { + if schema.AdditionalProperties.Boolean != nil { + if *schema.AdditionalProperties.Boolean == true { + typeModel.Open = true + propertyName := "additionalProperties" + typeName := "NamedAny" + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.Implicit = true + property.MapType = "Any" + 
property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) + return + } + } else if schema.AdditionalProperties.Schema != nil { + typeModel.Open = true + schema := schema.AdditionalProperties.Schema + if schema.Ref != nil { + propertyName := "additionalProperties" + mapType := domain.typeNameForReference(*schema.Ref) + typeName := fmt.Sprintf("Named%s", mapType) + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.Implicit = true + property.MapType = mapType + property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) + return + } else if schema.Type != nil { + typeName := *schema.Type.String + if typeName == "string" { + propertyName := "additionalProperties" + typeName := "NamedString" + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.Implicit = true + property.MapType = "string" + property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) + return + } else if typeName == "array" { + if schema.Items != nil { + itemType := *schema.Items.Schema.Type.String + if itemType == "string" { + propertyName := "additionalProperties" + typeName := "NamedStringArray" + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.Implicit = true + property.MapType = "StringArray" + property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) + return + } + } + } + } else if schema.OneOf != nil { + propertyTypeName := domain.TypeNameForStub(typeModel.Name + "Item") + propertyName := "additionalProperties" + typeName := fmt.Sprintf("Named%s", propertyTypeName) + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.Implicit = true + property.MapType = propertyTypeName + property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) + + domain.ObjectTypeRequests[propertyTypeName] = + NewTypeRequest(propertyTypeName, propertyName, schema) + } + } + } +} + +func (domain *Domain) buildOneOfAccessors(typeModel *TypeModel, schema *jsonschema.Schema) { + oneOfs := schema.OneOf + if oneOfs == nil { + return + } + typeModel.Open = true + typeModel.OneOfWrapper = true + for _, oneOf := range *oneOfs { + if oneOf.Ref != nil { + ref := *oneOf.Ref + typeName := domain.typeNameForReference(ref) + propertyName := domain.propertyNameForReference(ref) + + if propertyName != nil { + typeProperty := NewTypePropertyWithNameAndType(*propertyName, typeName) + typeModel.addProperty(typeProperty) + } + } else if oneOf.Type != nil && oneOf.Type.String != nil { + switch *oneOf.Type.String { + case "boolean": + typeProperty := NewTypePropertyWithNameAndType("boolean", "bool") + typeModel.addProperty(typeProperty) + case "integer": + typeProperty := NewTypePropertyWithNameAndType("integer", "int") + typeModel.addProperty(typeProperty) + case "number": + typeProperty := NewTypePropertyWithNameAndType("number", "float") + typeModel.addProperty(typeProperty) + case "string": + typeProperty := NewTypePropertyWithNameAndType("string", "string") + typeModel.addProperty(typeProperty) + default: + log.Printf("Unsupported oneOf:\n%+v", oneOf.String()) + } + } else { + log.Printf("Unsupported oneOf:\n%+v", oneOf.String()) + } + + } +} + +func schemaIsContainedInArray(s1 *jsonschema.Schema, s2 *jsonschema.Schema) bool { + if s2.TypeIs("array") { + 
if s2.Items.Schema != nil { + if s1.IsEqual(s2.Items.Schema) { + return true + } + } + } + return false +} + +func (domain *Domain) addAnonymousAccessorForSchema( + typeModel *TypeModel, + schema *jsonschema.Schema, + repeated bool) { + ref := schema.Ref + if ref != nil { + typeName := domain.typeNameForReference(*ref) + propertyName := domain.propertyNameForReference(*ref) + if propertyName != nil { + property := NewTypePropertyWithNameAndType(*propertyName, typeName) + property.Repeated = true + typeModel.addProperty(property) + typeModel.IsItemArray = true + } + } else { + typeName := "string" + propertyName := "value" + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.Repeated = true + typeModel.addProperty(property) + typeModel.IsStringArray = true + } +} + +func (domain *Domain) buildAnyOfAccessors(typeModel *TypeModel, schema *jsonschema.Schema) { + anyOfs := schema.AnyOf + if anyOfs == nil { + return + } + if len(*anyOfs) == 2 { + if schemaIsContainedInArray((*anyOfs)[0], (*anyOfs)[1]) { + //log.Printf("ARRAY OF %+v", (*anyOfs)[0].String()) + schema := (*anyOfs)[0] + domain.addAnonymousAccessorForSchema(typeModel, schema, true) + } else if schemaIsContainedInArray((*anyOfs)[1], (*anyOfs)[0]) { + //log.Printf("ARRAY OF %+v", (*anyOfs)[1].String()) + schema := (*anyOfs)[1] + domain.addAnonymousAccessorForSchema(typeModel, schema, true) + } else { + for _, anyOf := range *anyOfs { + ref := anyOf.Ref + if ref != nil { + typeName := domain.typeNameForReference(*ref) + propertyName := domain.propertyNameForReference(*ref) + if propertyName != nil { + property := NewTypePropertyWithNameAndType(*propertyName, typeName) + typeModel.addProperty(property) + } + } else { + typeName := "bool" + propertyName := "boolean" + property := NewTypePropertyWithNameAndType(propertyName, typeName) + typeModel.addProperty(property) + } + } + } + } else { + log.Printf("Unhandled anyOfs:\n%s", schema.String()) + } +} + +func (domain *Domain) buildDefaultAccessors(typeModel *TypeModel, schema *jsonschema.Schema) { + typeModel.Open = true + propertyName := "additionalProperties" + typeName := "NamedAny" + property := NewTypePropertyWithNameAndType(propertyName, typeName) + property.MapType = "Any" + property.Repeated = true + domain.MapTypeRequests[property.MapType] = property.MapType + typeModel.addProperty(property) +} + +// BuildTypeForDefinition creates a type representation for a schema definition. +func (domain *Domain) BuildTypeForDefinition( + typeName string, + propertyName string, + schema *jsonschema.Schema) *TypeModel { + if (schema.Type == nil) || (*schema.Type.String == "object") { + return domain.buildTypeForDefinitionObject(typeName, propertyName, schema) + } + return nil +} + +func (domain *Domain) buildTypeForDefinitionObject( + typeName string, + propertyName string, + schema *jsonschema.Schema) *TypeModel { + typeModel := NewTypeModel() + typeModel.Name = typeName + if schema.IsEmpty() { + domain.buildDefaultAccessors(typeModel, schema) + } else { + if schema.Description != nil { + typeModel.Description = *schema.Description + } + domain.buildTypeProperties(typeModel, schema) + domain.buildTypeRequirements(typeModel, schema) + domain.buildPatternPropertyAccessors(typeModel, schema) + domain.buildAdditionalPropertyAccessors(typeModel, schema) + domain.buildOneOfAccessors(typeModel, schema) + domain.buildAnyOfAccessors(typeModel, schema) + } + return typeModel +} + +// Build builds a domain model. 
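+// Build populates domain.TypeModels: it creates a "<Prefix>Document" type for
+// the top-level schema, one type for each schema definition, types for any
+// anonymous objects requested along the way, "Named*" pair types used to
+// represent ordered maps, and the shared StringArray and Any types. It
+// returns an error if the schema has no definitions section.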
+func (domain *Domain) Build() (err error) { + if (domain.Schema == nil) || (domain.Schema.Definitions == nil) { + return errors.New("missing definitions section") + } + // create a type for the top-level schema + typeName := domain.Prefix + "Document" + typeModel := NewTypeModel() + typeModel.Name = typeName + domain.buildTypeProperties(typeModel, domain.Schema) + domain.buildTypeRequirements(typeModel, domain.Schema) + domain.buildPatternPropertyAccessors(typeModel, domain.Schema) + domain.buildAdditionalPropertyAccessors(typeModel, domain.Schema) + domain.buildOneOfAccessors(typeModel, domain.Schema) + domain.buildAnyOfAccessors(typeModel, domain.Schema) + if len(typeModel.Properties) > 0 { + domain.TypeModels[typeName] = typeModel + } + + // create a type for each object defined in the schema + if domain.Schema.Definitions != nil { + for _, pair := range *(domain.Schema.Definitions) { + definitionName := pair.Name + definitionSchema := pair.Value + typeName := domain.TypeNameForStub(definitionName) + typeModel := domain.BuildTypeForDefinition(typeName, definitionName, definitionSchema) + if typeModel != nil { + domain.TypeModels[typeName] = typeModel + } + } + } + // iterate over anonymous object types to be instantiated and generate a type for each + for typeName, typeRequest := range domain.ObjectTypeRequests { + domain.TypeModels[typeRequest.Name] = + domain.buildTypeForDefinitionObject(typeName, typeRequest.PropertyName, typeRequest.Schema) + } + + // iterate over map item types to be instantiated and generate a type for each + mapTypeNames := make([]string, 0) + for mapTypeName := range domain.MapTypeRequests { + mapTypeNames = append(mapTypeNames, mapTypeName) + } + sort.Strings(mapTypeNames) + + for _, mapTypeName := range mapTypeNames { + typeName := "Named" + strings.Title(mapTypeName) + typeModel := NewTypeModel() + typeModel.Name = typeName + typeModel.Description = fmt.Sprintf( + "Automatically-generated message used to represent maps of %s as ordered (name,value) pairs.", + mapTypeName) + typeModel.IsPair = true + typeModel.PairValueType = mapTypeName + + nameProperty := NewTypeProperty() + nameProperty.Name = "name" + nameProperty.Type = "string" + nameProperty.Description = "Map key" + typeModel.addProperty(nameProperty) + + valueProperty := NewTypeProperty() + valueProperty.Name = "value" + valueProperty.Type = mapTypeName + valueProperty.Description = "Mapped value" + typeModel.addProperty(valueProperty) + + domain.TypeModels[typeName] = typeModel + } + + // add a type for string arrays + stringArrayType := NewTypeModel() + stringArrayType.Name = "StringArray" + stringProperty := NewTypeProperty() + stringProperty.Name = "value" + stringProperty.Type = "string" + stringProperty.Repeated = true + stringArrayType.addProperty(stringProperty) + domain.TypeModels[stringArrayType.Name] = stringArrayType + + // add a type for "Any" + anyType := NewTypeModel() + anyType.Name = "Any" + anyType.Open = true + anyType.IsBlob = true + valueProperty := NewTypeProperty() + valueProperty.Name = "value" + valueProperty.Type = "google.protobuf.Any" + anyType.addProperty(valueProperty) + yamlProperty := NewTypeProperty() + yamlProperty.Name = "yaml" + yamlProperty.Type = "string" + anyType.addProperty(yamlProperty) + domain.TypeModels[anyType.Name] = anyType + return err +} + +func (domain *Domain) sortedTypeNames() []string { + typeNames := make([]string, 0) + for typeName := range domain.TypeModels { + typeNames = append(typeNames, typeName) + } + sort.Strings(typeNames) + return 
typeNames +} + +// Description returns a string representation of a domain. +func (domain *Domain) Description() string { + typeNames := domain.sortedTypeNames() + result := "" + for _, typeName := range typeNames { + result += domain.TypeModels[typeName].description() + } + return result +} diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-compiler.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-compiler.go new file mode 100644 index 000000000..5cc48ec0f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-compiler.go @@ -0,0 +1,913 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "regexp" + "sort" + "strings" + + "github.com/googleapis/gnostic/printer" +) + +// patternNames hands out unique names for a given string. +type patternNames struct { + prefix string + values map[string]int + last int + + specialCase map[string]func(variable string) string +} + +// SpecialCaseExpression returns true if the provided regex can be inlined as a faster +// expression. +func (p *patternNames) SpecialCaseExpression(value, variable string) (code string, ok bool) { + fn, ok := p.specialCase[value] + if !ok { + return "", false + } + return fn(variable), ok +} + +// VariableName returns the variable name for the given value. +func (p *patternNames) VariableName(value string) string { + num, ok := p.values[value] + if !ok { + if p.values == nil { + p.values = make(map[string]int) + } + num = p.last + p.last++ + p.values[value] = num + } + return fmt.Sprintf("%s%d", p.prefix, num) +} + +func (p *patternNames) Names() map[string]string { + names := make(map[string]string) + for value, num := range p.values { + names[fmt.Sprintf("%s%d", p.prefix, num)] = value + } + return names +} + +// GenerateCompiler generates the compiler code for a domain. 
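+// The returned source contains the package declaration and imports, a
+// Version() helper, a New<Type>() constructor for every type in the domain,
+// ResolveReferences() and ToRawInfo() methods for each type, and the compiled
+// regular expressions used to validate patterned map keys.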
+func (domain *Domain) GenerateCompiler(packageName string, license string, imports []string) string { + code := &printer.Code{} + code.Print(license) + code.Print("// THIS FILE IS AUTOMATICALLY GENERATED.\n") + + // generate package declaration + code.Print("package %s\n", packageName) + + code.Print("import (") + for _, filename := range imports { + code.Print("\"" + filename + "\"") + } + code.Print(")\n") + + // generate a simple Version() function + code.Print("// Version returns the package name (and OpenAPI version).") + code.Print("func Version() string {") + code.Print(" return \"%s\"", packageName) + code.Print("}\n") + + typeNames := domain.sortedTypeNames() + + regexPatterns := &patternNames{ + prefix: "pattern", + specialCase: map[string]func(string) string{ + "^x-": func(variable string) string { return fmt.Sprintf("strings.HasPrefix(%s, \"x-\")", variable) }, + "^/": func(variable string) string { return fmt.Sprintf("strings.HasPrefix(%s, \"/\")", variable) }, + "^": func(_ string) string { return "true" }, + }, + } + + // generate NewX() constructor functions for each type + for _, typeName := range typeNames { + domain.generateConstructorForType(code, typeName, regexPatterns) + } + + // generate ResolveReferences() methods for each type + for _, typeName := range typeNames { + domain.generateResolveReferencesMethodsForType(code, typeName) + } + + // generate ToRawInfo() methods for each type + for _, typeName := range typeNames { + domain.generateToRawInfoMethodForType(code, typeName) + } + + domain.generateConstantVariables(code, regexPatterns) + + return code.String() +} + +func escapeSlashes(pattern string) string { + return strings.Replace(pattern, "\\", "\\\\", -1) +} + +var subpatternPattern = regexp.MustCompile("^.*(\\{.*\\}).*$") + +func nameForPattern(regexPatterns *patternNames, pattern string) string { + if !strings.HasPrefix(pattern, "^") { + if matches := subpatternPattern.FindStringSubmatch(pattern); matches != nil { + match := string(matches[1]) + pattern = strings.Replace(pattern, match, ".*", -1) + } + } + return regexPatterns.VariableName(pattern) +} + +func (domain *Domain) generateConstructorForType(code *printer.Code, typeName string, regexPatterns *patternNames) { + code.Print("// New%s creates an object of type %s if possible, returning an error if not.", typeName, typeName) + code.Print("func New%s(in interface{}, context *compiler.Context) (*%s, error) {", typeName, typeName) + code.Print("errors := make([]error, 0)") + + typeModel := domain.TypeModels[typeName] + parentTypeName := typeName + + if typeModel.IsStringArray { + code.Print("x := &TypeItem{}") + code.Print("switch in := in.(type) {") + code.Print("case string:") + code.Print(" x.Value = make([]string, 0)") + code.Print(" x.Value = append(x.Value, in)") + code.Print("case []interface{}:") + code.Print(" x.Value = make([]string, 0)") + code.Print(" for _, v := range in {") + code.Print(" value, ok := v.(string)") + code.Print(" if ok {") + code.Print(" x.Value = append(x.Value, value)") + code.Print(" } else {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for string array element: %%+v (%%T)\", value, value)") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print(" }") + code.Print(" }") + code.Print("default:") + code.Print(" message := fmt.Sprintf(\"has unexpected value for string array: %%+v (%%T)\", in, in)") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("}") + } else if 
typeModel.IsItemArray { + if domain.Version == "v2" { + code.Print("x := &ItemsItem{}") + code.Print("m, ok := compiler.UnpackMap(in)") + code.Print("if !ok {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for item array: %%+v (%%T)\", in, in)") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("} else {") + code.Print(" x.Schema = make([]*Schema, 0)") + code.Print(" y, err := NewSchema(m, compiler.NewContext(\"\", context))") + code.Print(" if err != nil {") + code.Print(" return nil, err") + code.Print(" }") + code.Print(" x.Schema = append(x.Schema, y)") + code.Print("}") + } else if domain.Version == "v3" { + code.Print("x := &ItemsItem{}") + code.Print("m, ok := compiler.UnpackMap(in)") + code.Print("if !ok {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for item array: %%+v (%%T)\", in, in)") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("} else {") + code.Print(" x.SchemaOrReference = make([]*SchemaOrReference, 0)") + code.Print(" y, err := NewSchemaOrReference(m, compiler.NewContext(\"\", context))") + code.Print(" if err != nil {") + code.Print(" return nil, err") + code.Print(" }") + code.Print(" x.SchemaOrReference = append(x.SchemaOrReference, y)") + code.Print("}") + } + } else if typeModel.IsBlob { + code.Print("x := &Any{}") + code.Print("bytes, _ := yaml.Marshal(in)") + code.Print("x.Yaml = string(bytes)") + } else if typeModel.Name == "StringArray" { + code.Print("x := &StringArray{}") + code.Print("a, ok := in.([]interface{})") + code.Print("if !ok {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for StringArray: %%+v (%%T)\", in, in)") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("} else {") + code.Print(" x.Value = make([]string, 0)") + code.Print(" for _, s := range a {") + code.Print(" x.Value = append(x.Value, s.(string))") + code.Print(" }") + code.Print("}") + } else if typeModel.Name == "Primitive" { + code.Print(" x := &Primitive{}") + code.Print(" matched := false") + code.Print(" switch in := in.(type) {") + code.Print(" case bool:") + code.Print(" x.Oneof = &Primitive_Boolean{Boolean: in}") + code.Print(" matched = true") + code.Print(" case string:") + code.Print(" x.Oneof = &Primitive_String_{String_: in}") + code.Print(" matched = true") + code.Print(" case int64:") + code.Print(" x.Oneof = &Primitive_Integer{Integer: in}") + code.Print(" matched = true") + code.Print(" case int32:") + code.Print(" x.Oneof = &Primitive_Integer{Integer: int64(in)}") + code.Print(" matched = true") + code.Print(" case int:") + code.Print(" x.Oneof = &Primitive_Integer{Integer: int64(in)}") + code.Print(" matched = true") + code.Print(" case float64:") + code.Print(" x.Oneof = &Primitive_Number{Number: in}") + code.Print(" matched = true") + code.Print(" case float32:") + code.Print(" x.Oneof = &Primitive_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" }") + code.Print(" if matched {") + code.Print(" // since the oneof matched one of its possibilities, discard any matching errors") + code.Print(" errors = make([]error, 0)") + code.Print(" }") + } else if typeModel.Name == "SpecificationExtension" { + code.Print(" x := &SpecificationExtension{}") + code.Print(" matched := false") + code.Print(" switch in := in.(type) {") + code.Print(" case bool:") + code.Print(" x.Oneof = &SpecificationExtension_Boolean{Boolean: in}") + code.Print(" matched = true") + code.Print(" case 
string:") + code.Print(" x.Oneof = &SpecificationExtension_String_{String_: in}") + code.Print(" matched = true") + code.Print(" case int64:") + code.Print(" x.Oneof = &SpecificationExtension_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" case int32:") + code.Print(" x.Oneof = &SpecificationExtension_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" case int:") + code.Print(" x.Oneof = &SpecificationExtension_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" case float64:") + code.Print(" x.Oneof = &SpecificationExtension_Number{Number: in}") + code.Print(" matched = true") + code.Print(" case float32:") + code.Print(" x.Oneof = &SpecificationExtension_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" }") + code.Print(" if matched {") + code.Print(" // since the oneof matched one of its possibilities, discard any matching errors") + code.Print(" errors = make([]error, 0)") + code.Print(" }") + } else if typeModel.Name == "DefaultType" { + code.Print(" x := &DefaultType{}") + code.Print(" matched := false") + code.Print(" switch in := in.(type) {") + code.Print(" case bool:") + code.Print(" x.Oneof = &DefaultType_Boolean{Boolean: in}") + code.Print(" matched = true") + code.Print(" case string:") + code.Print(" x.Oneof = &DefaultType_String_{String_: in}") + code.Print(" matched = true") + code.Print(" case int64:") + code.Print(" x.Oneof = &DefaultType_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" case int32:") + code.Print(" x.Oneof = &DefaultType_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" case int:") + code.Print(" x.Oneof = &DefaultType_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" case float64:") + code.Print(" x.Oneof = &DefaultType_Number{Number: in}") + code.Print(" matched = true") + code.Print(" case float32:") + code.Print(" x.Oneof = &DefaultType_Number{Number: float64(in)}") + code.Print(" matched = true") + code.Print(" }") + code.Print(" if matched {") + code.Print(" // since the oneof matched one of its possibilities, discard any matching errors") + code.Print(" errors = make([]error, 0)") + code.Print(" }") + } else { + oneOfWrapper := typeModel.OneOfWrapper + + code.Print("x := &%s{}", typeName) + + if oneOfWrapper { + code.Print("matched := false") + } + + unpackAtTop := !oneOfWrapper || len(typeModel.Required) > 0 + if unpackAtTop { + code.Print("m, ok := compiler.UnpackMap(in)") + code.Print("if !ok {") + code.Print(" message := fmt.Sprintf(\"has unexpected value: %%+v (%%T)\", in, in)") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("} else {") + } + if len(typeModel.Required) > 0 { + // verify that map includes all required keys + keyString := "" + sort.Strings(typeModel.Required) + for _, k := range typeModel.Required { + if keyString != "" { + keyString += "," + } + keyString += "\"" + keyString += k + keyString += "\"" + } + code.Print("requiredKeys := []string{%s}", keyString) + code.Print("missingKeys := compiler.MissingKeysInMap(m, requiredKeys)") + code.Print("if len(missingKeys) > 0 {") + code.Print(" message := fmt.Sprintf(\"is missing required %%s: %%+v\", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, \", \"))") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("}") + } + + if !typeModel.Open { + // verify that map has no unspecified keys + 
allowedKeys := make([]string, 0) + for _, property := range typeModel.Properties { + if !property.Implicit { + allowedKeys = append(allowedKeys, property.Name) + } + } + sort.Strings(allowedKeys) + allowedKeyString := "" + for _, allowedKey := range allowedKeys { + if allowedKeyString != "" { + allowedKeyString += "," + } + allowedKeyString += "\"" + allowedKeyString += allowedKey + allowedKeyString += "\"" + } + allowedPatternString := "" + if typeModel.OpenPatterns != nil { + for _, pattern := range typeModel.OpenPatterns { + if allowedPatternString != "" { + allowedPatternString += "," + } + allowedPatternString += nameForPattern(regexPatterns, pattern) + } + } + // verify that map includes only allowed keys and patterns + code.Print("allowedKeys := []string{%s}", allowedKeyString) + if len(allowedPatternString) > 0 { + code.Print("allowedPatterns := []*regexp.Regexp{%s}", allowedPatternString) + } else { + code.Print("var allowedPatterns []*regexp.Regexp") + + } + code.Print("invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns)") + code.Print("if len(invalidKeys) > 0 {") + code.Print(" message := fmt.Sprintf(\"has invalid %%s: %%+v\", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, \", \"))") + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("}") + } + + var fieldNumber = 0 + for _, propertyModel := range typeModel.Properties { + propertyName := propertyModel.Name + fieldNumber++ + propertyType := propertyModel.Type + if propertyType == "int" { + propertyType = "int64" + } + var displayName = propertyName + if displayName == "$ref" { + displayName = "_ref" + } + if displayName == "$schema" { + displayName = "_schema" + } + displayName = camelCaseToSnakeCase(displayName) + + var line = fmt.Sprintf("%s %s = %d;", propertyType, displayName, fieldNumber) + if propertyModel.Repeated { + line = "repeated " + line + } + code.Print("// " + line) + + fieldName := strings.Title(snakeCaseToCamelCase(propertyName)) + if propertyName == "$ref" { + fieldName = "XRef" + } + + typeModel, typeFound := domain.TypeModels[propertyType] + if typeFound && !typeModel.IsPair { + if propertyModel.Repeated { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" // repeated %s", typeModel.Name) + code.Print(" x.%s = make([]*%s, 0)", fieldName, typeModel.Name) + code.Print(" a, ok := v%d.([]interface{})", fieldNumber) + code.Print(" if ok {") + code.Print(" for _, item := range a {") + code.Print(" y, err := New%s(item, compiler.NewContext(\"%s\", context))", typeModel.Name, propertyName) + code.Print(" if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print(" }") + code.Print(" x.%s = append(x.%s, y)", fieldName, fieldName) + code.Print(" }") + code.Print(" }") + code.Print("}") + } else { + if oneOfWrapper { + code.Print("{") + if !unpackAtTop { + code.Print(" m, ok := compiler.UnpackMap(in)") + code.Print(" if ok {") + } + code.Print(" // errors might be ok here, they mean we just don't have the right subtype") + code.Print(" t, matchingError := New%s(m, compiler.NewContext(\"%s\", context))", typeModel.Name, propertyName) + code.Print(" if matchingError == nil {") + code.Print(" x.Oneof = &%s_%s{%s: t}", parentTypeName, typeModel.Name, typeModel.Name) + code.Print(" matched = true") + code.Print(" } else {") + code.Print(" errors = append(errors, matchingError)") + code.Print(" }") + if !unpackAtTop { + 
code.Print(" }") + } + code.Print("}") + } else { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" var err error") + code.Print(" x.%s, err = New%s(v%d, compiler.NewContext(\"%s\", context))", + fieldName, typeModel.Name, fieldNumber, propertyName) + code.Print(" if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print(" }") + code.Print("}") + } + } + } else if propertyType == "string" { + if propertyModel.Repeated { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" v, ok := v%d.([]interface{})", fieldNumber) + code.Print(" if ok {") + code.Print(" x.%s = compiler.ConvertInterfaceArrayToStringArray(v)", fieldName) + code.Print(" } else {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v (%%T)\", v%d, v%d)", propertyName, fieldNumber, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("}") + + if propertyModel.StringEnumValues != nil { + code.Print("// check for valid enum values") + code.Print("// %+v", propertyModel.StringEnumValues) + + stringArrayLiteral := "[]string{" + for i, item := range propertyModel.StringEnumValues { + if i > 0 { + stringArrayLiteral += "," + } + stringArrayLiteral += "\"" + item + "\"" + } + stringArrayLiteral += "}" + code.Print("if ok && !compiler.StringArrayContainsValues(%s, x.%s) {", stringArrayLiteral, fieldName) + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v\", v%d)", propertyName, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("}") + } + + code.Print("}") + } else { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" x.%s, ok = v%d.(string)", fieldName, fieldNumber) + code.Print(" if !ok {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v (%%T)\", v%d, v%d)", propertyName, fieldNumber, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print(" }") + + if propertyModel.StringEnumValues != nil { + code.Print("// check for valid enum values") + code.Print("// %+v", propertyModel.StringEnumValues) + + stringArrayLiteral := "[]string{" + for i, item := range propertyModel.StringEnumValues { + if i > 0 { + stringArrayLiteral += "," + } + stringArrayLiteral += "\"" + item + "\"" + } + stringArrayLiteral += "}" + + code.Print("if ok && !compiler.StringArrayContainsValue(%s, x.%s) {", stringArrayLiteral, fieldName) + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v (%%T)\", v%d, v%d)", propertyName, fieldNumber, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print("}") + } + code.Print("}") + } + } else if propertyType == "float" { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" switch v%d := v%d.(type) {", fieldNumber, fieldNumber) + code.Print(" case float64:") + code.Print(" x.%s = v%d", fieldName, fieldNumber) + code.Print(" case float32:") + code.Print(" x.%s = float64(v%d)", fieldName, fieldNumber) + code.Print(" case uint64:") + code.Print(" x.%s = float64(v%d)", fieldName, fieldNumber) + code.Print(" case uint32:") + code.Print(" x.%s = 
float64(v%d)", fieldName, fieldNumber) + code.Print(" case int64:") + code.Print(" x.%s = float64(v%d)", fieldName, fieldNumber) + code.Print(" case int32:") + code.Print(" x.%s = float64(v%d)", fieldName, fieldNumber) + code.Print(" case int:") + code.Print(" x.%s = float64(v%d)", fieldName, fieldNumber) + code.Print(" default:") + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v (%%T)\", v%d, v%d)", propertyName, fieldNumber, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print(" }") + code.Print("}") + } else if propertyType == "int64" { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" t, ok := v%d.(int)", fieldNumber) + code.Print(" if ok {") + code.Print(" x.%s = int64(t)", fieldName) + code.Print(" } else {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v (%%T)\", v%d, v%d)", propertyName, fieldNumber, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print(" }") + code.Print("}") + } else if propertyType == "bool" { + if oneOfWrapper { + propertyName := "Boolean" + code.Print("boolValue, ok := in.(bool)") + code.Print("if ok {") + code.Print(" x.Oneof = &%s_%s{%s: boolValue}", parentTypeName, propertyName, propertyName) + code.Print("}") + } else { + code.Print("v%d := compiler.MapValueForKey(m, \"%s\")", fieldNumber, propertyName) + code.Print("if (v%d != nil) {", fieldNumber) + code.Print(" x.%s, ok = v%d.(bool)", fieldName, fieldNumber) + code.Print(" if !ok {") + code.Print(" message := fmt.Sprintf(\"has unexpected value for %s: %%+v (%%T)\", v%d, v%d)", propertyName, fieldNumber, fieldNumber) + code.Print(" errors = append(errors, compiler.NewError(context, message))") + code.Print(" }") + code.Print("}") + } + } else { + mapTypeName := propertyModel.MapType + if mapTypeName != "" { + code.Print("// MAP: %s %s", mapTypeName, propertyModel.Pattern) + if mapTypeName == "string" { + code.Print("x.%s = make([]*NamedString, 0)", fieldName) + } else { + code.Print("x.%s = make([]*Named%s, 0)", fieldName, mapTypeName) + } + code.Print("for _, item := range m {") + code.Print("k, ok := compiler.StringValue(item.Key)") + code.Print("if ok {") + code.Print("v := item.Value") + if pattern := propertyModel.Pattern; pattern != "" { + if inline, ok := regexPatterns.SpecialCaseExpression(pattern, "k"); ok { + code.Print("if %s {", inline) + } else { + code.Print("if %s.MatchString(k) {", nameForPattern(regexPatterns, pattern)) + } + } + + code.Print("pair := &Named" + strings.Title(mapTypeName) + "{}") + code.Print("pair.Name = k") + + if mapTypeName == "string" { + code.Print("pair.Value = v.(string)") + } else if mapTypeName == "Any" { + code.Print("result := &Any{}") + code.Print("handled, resultFromExt, err := compiler.HandleExtension(context, v, k)") + code.Print("if handled {") + code.Print(" if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print(" } else {") + code.Print(" bytes, _ := yaml.Marshal(v)") + code.Print(" result.Yaml = string(bytes)") + code.Print(" result.Value = resultFromExt") + code.Print(" pair.Value = result") + code.Print(" }") + code.Print("} else {") + code.Print(" pair.Value, err = NewAny(v, compiler.NewContext(k, context))") + code.Print(" if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print(" }") + code.Print("}") + + } else { + code.Print("var err error") + 
code.Print("pair.Value, err = New%s(v, compiler.NewContext(k, context))", mapTypeName) + code.Print("if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print("}") + } + code.Print("x.%s = append(x.%s, pair)", fieldName, fieldName) + if propertyModel.Pattern != "" { + code.Print("}") + } + code.Print("}") + code.Print("}") + } else { + code.Print("// TODO: %s", propertyType) + } + } + } + if unpackAtTop { + code.Print("}") + } + if oneOfWrapper { + code.Print("if matched {") + code.Print(" // since the oneof matched one of its possibilities, discard any matching errors") + code.Print(" errors = make([]error, 0)") + code.Print("}") + } + } + + // assumes that the return value is in a variable named "x" + code.Print(" return x, compiler.NewErrorGroupOrNil(errors)") + code.Print("}\n") +} + +// ResolveReferences() methods +func (domain *Domain) generateResolveReferencesMethodsForType(code *printer.Code, typeName string) { + code.Print("// ResolveReferences resolves references found inside %s objects.", typeName) + code.Print("func (m *%s) ResolveReferences(root string) (interface{}, error) {", typeName) + code.Print("errors := make([]error, 0)") + + typeModel := domain.TypeModels[typeName] + if typeModel.OneOfWrapper { + // call ResolveReferences on whatever is in the Oneof. + for _, propertyModel := range typeModel.Properties { + propertyType := propertyModel.Type + _, typeFound := domain.TypeModels[propertyType] + if typeFound { + code.Print("{") + code.Print("p, ok := m.Oneof.(*%s_%s)", typeName, propertyType) + code.Print("if ok {") + if propertyType == "JsonReference" { // Special case for OpenAPI + code.Print("info, err := p.%s.ResolveReferences(root)", propertyType) + code.Print("if err != nil {") + code.Print(" return nil, err") + code.Print("} else if info != nil {") + code.Print(" n, err := New%s(info, nil)", typeName) + code.Print(" if err != nil {") + code.Print(" return nil, err") + code.Print(" } else if n != nil {") + code.Print(" *m = *n") + code.Print(" return nil, nil") + code.Print(" }") + code.Print("}") + } else { + code.Print("_, err := p.%s.ResolveReferences(root)", propertyType) + code.Print("if err != nil {") + code.Print(" return nil, err") + code.Print("}") + } + code.Print("}") + code.Print("}") + } + } + } else { + for _, propertyModel := range typeModel.Properties { + propertyName := propertyModel.Name + var displayName = propertyName + if displayName == "$ref" { + displayName = "_ref" + } + if displayName == "$schema" { + displayName = "_schema" + } + displayName = camelCaseToSnakeCase(displayName) + + fieldName := strings.Title(propertyName) + if propertyName == "$ref" { + fieldName = "XRef" + code.Print("if m.XRef != \"\" {") + //code.Print("log.Printf(\"%s reference to resolve %%+v\", m.XRef)", typeName) + code.Print("info, err := compiler.ReadInfoForRef(root, m.XRef)") + + code.Print("if err != nil {") + code.Print(" return nil, err") + code.Print("}") + //code.Print("log.Printf(\"%%+v\", info)") + + if len(typeModel.Properties) > 1 { + code.Print("if info != nil {") + code.Print(" replacement, err := New%s(info, nil)", typeName) + code.Print(" if err == nil {") + code.Print(" *m = *replacement") + code.Print(" return m.ResolveReferences(root)") + code.Print(" }") + code.Print("}") + } + + code.Print("return info, nil") + code.Print("}") + } + + if !propertyModel.Repeated { + propertyType := propertyModel.Type + typeModel, typeFound := domain.TypeModels[propertyType] + if typeFound && !typeModel.IsPair { + code.Print("if m.%s != nil {", 
fieldName) + code.Print(" _, err := m.%s.ResolveReferences(root)", fieldName) + code.Print(" if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print(" }") + code.Print("}") + } + } else { + propertyType := propertyModel.Type + _, typeFound := domain.TypeModels[propertyType] + if typeFound { + code.Print("for _, item := range m.%s {", fieldName) + code.Print("if item != nil {") + code.Print(" _, err := item.ResolveReferences(root)") + code.Print(" if err != nil {") + code.Print(" errors = append(errors, err)") + code.Print(" }") + code.Print("}") + code.Print("}") + } + + } + } + } + code.Print(" return nil, compiler.NewErrorGroupOrNil(errors)") + code.Print("}\n") +} + +// ToRawInfo() methods +func (domain *Domain) generateToRawInfoMethodForType(code *printer.Code, typeName string) { + code.Print("// ToRawInfo returns a description of %s suitable for JSON or YAML export.", typeName) + code.Print("func (m *%s) ToRawInfo() interface{} {", typeName) + typeModel := domain.TypeModels[typeName] + if typeName == "Any" { + code.Print("var err error") + code.Print("var info1 []yaml.MapSlice") + code.Print("err = yaml.Unmarshal([]byte(m.Yaml), &info1)") + code.Print("if err == nil {return info1}") + code.Print("var info2 yaml.MapSlice") + code.Print("err = yaml.Unmarshal([]byte(m.Yaml), &info2)") + code.Print("if err == nil {return info2}") + code.Print("var info3 interface{}") + code.Print("err = yaml.Unmarshal([]byte(m.Yaml), &info3)") + code.Print("if err == nil {return info3}") + code.Print("return nil") + } else if typeName == "StringArray" { + code.Print("return m.Value") + } else if typeModel.OneOfWrapper { + code.Print("// ONE OF WRAPPER") + code.Print("// %s", typeModel.Name) + for i, item := range typeModel.Properties { + code.Print("// %+v", *item) + if item.Type == "float" { + code.Print("if v%d, ok := m.GetOneof().(*%s_Number); ok {", i, typeName) + code.Print("return v%d.Number", i) + code.Print("}") + } else if item.Type == "bool" { + code.Print("if v%d, ok := m.GetOneof().(*%s_Boolean); ok {", i, typeName) + code.Print("return v%d.Boolean", i) + code.Print("}") + } else if item.Type == "string" { + code.Print("if v%d, ok := m.GetOneof().(*%s_String_); ok {", i, typeName) + code.Print("return v%d.String_", i) + code.Print("}") + } else { + code.Print("v%d := m.Get%s()", i, item.Type) + code.Print("if v%d != nil {", i) + code.Print(" return v%d.ToRawInfo()", i) + code.Print("}") + } + } + code.Print("return nil") + } else { + code.Print("info := yaml.MapSlice{}") + for _, propertyModel := range typeModel.Properties { + switch propertyModel.Type { + case "string": + propertyName := propertyModel.Name + if !propertyModel.Repeated { + code.Print("if m.%s != \"\" {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } else { + code.Print("if len(m.%s) != 0 {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } + case "bool": + propertyName := propertyModel.Name + if !propertyModel.Repeated { + code.Print("if m.%s != false {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } else { + code.Print("if len(m.%s) != 0 {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + 
} + case "int": + propertyName := propertyModel.Name + if !propertyModel.Repeated { + code.Print("if m.%s != 0 {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } else { + code.Print("if len(m.%s) != 0 {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } + case "float": + propertyName := propertyModel.Name + if !propertyModel.Repeated { + code.Print("if m.%s != 0.0 {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } else { + code.Print("if len(m.%s) != 0 {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s})", propertyName, propertyModel.FieldName()) + code.Print("}") + } + default: + propertyName := propertyModel.Name + if propertyName == "value" { + code.Print("// %+v", propertyModel) + } else if !propertyModel.Repeated { + code.Print("if m.%s != nil {", propertyModel.FieldName()) + if propertyModel.Type == "TypeItem" { + code.Print("if len(m.Type.Value) == 1 {") + code.Print("info = append(info, yaml.MapItem{\"type\", m.Type.Value[0]})") + code.Print("} else {") + code.Print("info = append(info, yaml.MapItem{\"type\", m.Type.Value})") + code.Print("}") + } else if propertyModel.Type == "ItemsItem" { + code.Print("items := make([]interface{}, 0)") + if domain.Version == "v2" { + code.Print("for _, item := range m.Items.Schema {") + } else { + code.Print("for _, item := range m.Items.SchemaOrReference {") + } + code.Print(" items = append(items, item.ToRawInfo())") + code.Print("}") + code.Print("info = append(info, yaml.MapItem{\"items\", items[0]})") + } else { + code.Print("info = append(info, yaml.MapItem{\"%s\", m.%s.ToRawInfo()})", + propertyName, propertyModel.FieldName()) + } + code.Print("}") + code.Print("// %+v", propertyModel) + } else if propertyModel.MapType == "string" { + code.Print("// %+v", propertyModel) + } else if propertyModel.MapType != "" { + code.Print("if m.%s != nil {", propertyModel.FieldName()) + code.Print("for _, item := range m.%s {", propertyModel.FieldName()) + code.Print("info = append(info, yaml.MapItem{item.Name, item.Value.ToRawInfo()})") + code.Print("}") + code.Print("}") + code.Print("// %+v", propertyModel) + } else { + code.Print("if len(m.%s) != 0 {", propertyModel.FieldName()) + code.Print("items := make([]interface{}, 0)") + code.Print("for _, item := range m.%s {", propertyModel.FieldName()) + code.Print("items = append(items, item.ToRawInfo())") + code.Print("}") + code.Print("info = append(info, yaml.MapItem{\"%s\", items})", propertyName) + code.Print("}") + code.Print("// %+v", propertyModel) + } + } + } + code.Print("return info") + } + code.Print("}\n") +} + +func (domain *Domain) generateConstantVariables(code *printer.Code, regexPatterns *patternNames) { + names := regexPatterns.Names() + var sortedNames []string + for name, _ := range names { + sortedNames = append(sortedNames, name) + } + sort.Strings(sortedNames) + code.Print("var (") + for _, name := range sortedNames { + code.Print("%s = regexp.MustCompile(\"%s\")", name, escapeSlashes(names[name])) + } + code.Print(")\n") +} diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-extension.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-extension.go new file mode 100644 index 
000000000..87cfa462a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-extension.go @@ -0,0 +1,363 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "io/ioutil" + "os" + "os/exec" + "path" + "path/filepath" + "regexp" + "runtime" + "sort" + "strings" + + "github.com/googleapis/gnostic/compiler" + "github.com/googleapis/gnostic/jsonschema" + "github.com/googleapis/gnostic/printer" +) + +var protoOptionsForExtensions = []ProtoOption{ + ProtoOption{ + Name: "java_multiple_files", + Value: "true", + Comment: "// This option lets the proto compiler generate Java code inside the package\n" + + "// name (see below) instead of inside an outer class. It creates a simpler\n" + + "// developer experience by reducing one-level of name nesting and be\n" + + "// consistent with most programming languages that don't support outer classes.", + }, + + ProtoOption{ + Name: "java_outer_classname", + Value: "VendorExtensionProto", + Comment: "// The Java outer classname should be the filename in UpperCamelCase. This\n" + + "// class is only used to hold proto descriptor, so developers don't need to\n" + + "// work with it directly.", + }, +} + +const additionalCompilerCodeWithMain = "" + + "func handleExtension(extensionName string, yamlInput string) (bool, proto.Message, error) {\n" + + " switch extensionName {\n" + + " // All supported extensions\n" + + " %s\n" + + " default:\n" + + " return false, nil, nil\n" + + " }\n" + + "}\n" + + "\n" + + "func main() {\n" + + " openapiextension_v1.ProcessExtension(handleExtension)\n" + + "}\n" + +const caseStringForObjectTypes = "\n" + + "case \"%s\":\n" + + "var info yaml.MapSlice\n" + + "err := yaml.Unmarshal([]byte(yamlInput), &info)\n" + + "if err != nil {\n" + + " return true, nil, err\n" + + "}\n" + + "newObject, err := %s.New%s(info, compiler.NewContext(\"$root\", nil))\n" + + "return true, newObject, err" + +const caseStringForWrapperTypes = "\n" + + "case \"%s\":\n" + + "var info %s\n" + + "err := yaml.Unmarshal([]byte(yamlInput), &info)\n" + + "if err != nil {\n" + + " return true, nil, err\n" + + "}\n" + + "newObject := &wrappers.%s{Value: info}\n" + + "return true, newObject, nil" + +// generateMainFile generates the main program for an extension. 
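+// For the extension generator, the emitted file looks roughly like the sketch
+// below (the license text, import list, and switch cases vary per extension):
+//
+//	// THIS FILE IS AUTOMATICALLY GENERATED.
+//	package main
+//
+//	import ( ... )
+//
+//	func handleExtension(extensionName string, yamlInput string) (bool, proto.Message, error) {
+//		switch extensionName {
+//		// All supported extensions
+//		...
+//		default:
+//			return false, nil, nil
+//		}
+//	}
+//
+//	func main() {
+//		openapiextension_v1.ProcessExtension(handleExtension)
+//	}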
+func generateMainFile(packageName string, license string, codeBody string, imports []string) string {
+	code := &printer.Code{}
+	code.Print(license)
+	code.Print("// THIS FILE IS AUTOMATICALLY GENERATED.\n")
+
+	// generate package declaration
+	code.Print("package %s\n", packageName)
+
+	code.Print("import (")
+	for _, filename := range imports {
+		code.Print("\"" + filename + "\"")
+	}
+	code.Print(")\n")
+
+	code.Print(codeBody)
+	return code.String()
+}
+
+func getBaseFileNameWithoutExt(filePath string) string {
+	tmp := filepath.Base(filePath)
+	return tmp[0 : len(tmp)-len(filepath.Ext(tmp))]
+}
+
+func toProtoPackageName(input string) string {
+	var out = ""
+	nonAlphaNumeric := regexp.MustCompile("[^0-9A-Za-z_]+")
+	input = nonAlphaNumeric.ReplaceAllString(input, "")
+	for index, character := range input {
+		if character >= 'A' && character <= 'Z' {
+			if index > 0 && input[index-1] != '_' {
+				out += "_"
+			}
+			out += string(character - 'A' + 'a')
+		} else {
+			out += string(character)
+		}
+	}
+	return out
+}
+
+type primitiveTypeInfo struct {
+	goTypeName       string
+	wrapperProtoName string
+}
+
+var supportedPrimitiveTypeInfos = map[string]primitiveTypeInfo{
+	"string":  primitiveTypeInfo{goTypeName: "string", wrapperProtoName: "StringValue"},
+	"number":  primitiveTypeInfo{goTypeName: "float64", wrapperProtoName: "DoubleValue"},
+	"integer": primitiveTypeInfo{goTypeName: "int64", wrapperProtoName: "Int64Value"},
+	"boolean": primitiveTypeInfo{goTypeName: "bool", wrapperProtoName: "BoolValue"},
+	// TODO: Investigate how to support arrays. For now, users cannot create
+	// extension handlers for arrays and must use the plain YAML string as is.
+}
+
+type generatedTypeInfo struct {
+	schemaName string
+	// if this is not nil, the schema should be treated as a primitive type.
+	optionalPrimitiveTypeInfo *primitiveTypeInfo
+}
+
+// GenerateExtension generates the implementation of an extension.
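+// Given a JSON schema file named "x-<name>.json", it creates a
+// "gnostic-x-<name>" directory under outDir containing a proto/ subdirectory
+// with the generated .proto description and extension compiler (.go), plus a
+// main.go that registers the generated handler with
+// openapiextension_v1.ProcessExtension.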
+func GenerateExtension(schemaFile string, outDir string) error { + outFileBaseName := getBaseFileNameWithoutExt(schemaFile) + extensionNameWithoutXDashPrefix := outFileBaseName[len("x-"):] + outDir = path.Join(outDir, "gnostic-x-"+extensionNameWithoutXDashPrefix) + protoPackage := toProtoPackageName(extensionNameWithoutXDashPrefix) + protoPackageName := strings.ToLower(protoPackage) + goPackageName := protoPackageName + + protoOutDirectory := outDir + "/" + "proto" + var err error + + projectRoot := os.Getenv("GOPATH") + "/src/github.com/googleapis/gnostic/" + baseSchema, err := jsonschema.NewSchemaFromFile(projectRoot + "jsonschema/schema.json") + if err != nil { + return err + } + baseSchema.ResolveRefs() + baseSchema.ResolveAllOfs() + + openapiSchema, err := jsonschema.NewSchemaFromFile(schemaFile) + if err != nil { + return err + } + openapiSchema.ResolveRefs() + openapiSchema.ResolveAllOfs() + + // build a simplified model of the types described by the schema + cc := NewDomain(openapiSchema, "v2") // TODO fix for OpenAPI v3 + + // create a type for each object defined in the schema + extensionNameToMessageName := make(map[string]generatedTypeInfo) + schemaErrors := make([]error, 0) + supportedPrimitives := make([]string, 0) + for key := range supportedPrimitiveTypeInfos { + supportedPrimitives = append(supportedPrimitives, key) + } + sort.Strings(supportedPrimitives) + if cc.Schema.Definitions != nil { + for _, pair := range *(cc.Schema.Definitions) { + definitionName := pair.Name + definitionSchema := pair.Value + // ensure the id field is set + if definitionSchema.ID == nil || len(*(definitionSchema.ID)) == 0 { + schemaErrors = append(schemaErrors, + fmt.Errorf("schema %s has no 'id' field, which must match the "+ + "name of the OpenAPI extension that the schema represents", + definitionName)) + } else { + if _, ok := extensionNameToMessageName[*(definitionSchema.ID)]; ok { + schemaErrors = append(schemaErrors, + fmt.Errorf("schema %s and %s have the same 'id' field value", + definitionName, extensionNameToMessageName[*(definitionSchema.ID)].schemaName)) + } else if (definitionSchema.Type == nil) || (*definitionSchema.Type.String == "object") { + extensionNameToMessageName[*(definitionSchema.ID)] = generatedTypeInfo{schemaName: definitionName} + } else { + // this is a primitive type + if val, ok := supportedPrimitiveTypeInfos[*definitionSchema.Type.String]; ok { + extensionNameToMessageName[*(definitionSchema.ID)] = generatedTypeInfo{schemaName: definitionName, optionalPrimitiveTypeInfo: &val} + } else { + schemaErrors = append(schemaErrors, + fmt.Errorf("Schema %s has type '%s' which is "+ + "not supported. Supported primitive types are "+ + "%s.\n", definitionName, + *definitionSchema.Type.String, + supportedPrimitives)) + } + } + } + typeName := cc.TypeNameForStub(definitionName) + typeModel := cc.BuildTypeForDefinition(typeName, definitionName, definitionSchema) + if typeModel != nil { + cc.TypeModels[typeName] = typeModel + } + } + } + if len(schemaErrors) > 0 { + // error has been reported. + return compiler.NewErrorGroupOrNil(schemaErrors) + } + + err = os.MkdirAll(outDir, os.ModePerm) + if err != nil { + return err + } + + err = os.MkdirAll(protoOutDirectory, os.ModePerm) + if err != nil { + return err + } + + // generate the protocol buffer description + protoOptions := append(protoOptionsForExtensions, + ProtoOption{Name: "java_package", Value: "org.openapi.extension." 
+ strings.ToLower(protoPackage), Comment: "// The Java package name must be proto package name with proper prefix."},
+		ProtoOption{Name: "objc_class_prefix", Value: strings.ToLower(protoPackage),
+			Comment: "// A reasonable prefix for the Objective-C symbols generated from the package.\n" +
+				"// It should at a minimum be 3 characters long, all uppercase, and convention\n" +
+				"// is to use an abbreviation of the package name. Something short, but\n" +
+				"// hopefully unique enough to not conflict with things that may come along in\n" +
+				"// the future. 'GPB' is reserved for the protocol buffer implementation itself.",
+		})
+
+	proto := cc.generateProto(protoPackageName, License, protoOptions, nil)
+	protoFilename := path.Join(protoOutDirectory, outFileBaseName+".proto")
+
+	err = ioutil.WriteFile(protoFilename, []byte(proto), 0644)
+	if err != nil {
+		return err
+	}
+
+	// generate the compiler
+	compiler := cc.GenerateCompiler(goPackageName, License, []string{
+		"fmt",
+		"regexp",
+		"strings",
+		"github.com/googleapis/gnostic/compiler",
+		"gopkg.in/yaml.v2",
+	})
+	goFilename := path.Join(protoOutDirectory, outFileBaseName+".go")
+	err = ioutil.WriteFile(goFilename, []byte(compiler), 0644)
+	if err != nil {
+		return err
+	}
+	err = exec.Command(runtime.GOROOT()+"/bin/gofmt", "-w", goFilename).Run()
+
+	// generate the main file.
+	outDirRelativeToGoPathSrc := strings.Replace(outDir, path.Join(os.Getenv("GOPATH"), "src")+"/", "", 1)
+
+	var extensionNameKeys []string
+	for k := range extensionNameToMessageName {
+		extensionNameKeys = append(extensionNameKeys, k)
+	}
+	sort.Strings(extensionNameKeys)
+
+	wrapperTypeIncluded := false
+	var cases string
+	for _, extensionName := range extensionNameKeys {
+		if extensionNameToMessageName[extensionName].optionalPrimitiveTypeInfo == nil {
+			cases += fmt.Sprintf(caseStringForObjectTypes, extensionName, goPackageName, extensionNameToMessageName[extensionName].schemaName)
+		} else {
+			wrapperTypeIncluded = true
+			cases += fmt.Sprintf(caseStringForWrapperTypes, extensionName, extensionNameToMessageName[extensionName].optionalPrimitiveTypeInfo.goTypeName, extensionNameToMessageName[extensionName].optionalPrimitiveTypeInfo.wrapperProtoName)
+		}
+
+	}
+	extMainCode := fmt.Sprintf(additionalCompilerCodeWithMain, cases)
+	imports := []string{
+		"github.com/golang/protobuf/proto",
+		"github.com/googleapis/gnostic/extensions",
+		"github.com/googleapis/gnostic/compiler",
+		"gopkg.in/yaml.v2",
+		outDirRelativeToGoPathSrc + "/" + "proto",
+	}
+	if wrapperTypeIncluded {
+		imports = append(imports, "github.com/golang/protobuf/ptypes/wrappers")
+	}
+	main := generateMainFile("main", License, extMainCode, imports)
+	mainFileName := path.Join(outDir, "main.go")
+	err = ioutil.WriteFile(mainFileName, []byte(main), 0644)
+	if err != nil {
+		return err
+	}
+
+	// format the main file
+	return exec.Command(runtime.GOROOT()+"/bin/gofmt", "-w", mainFileName).Run()
+}
+
+func processExtensionGenCommandline(usage string) error {
+
+	outDir := ""
+	schemaFile := ""
+
+	extParamRegex, _ := regexp.Compile("--(.+)=(.+)")
+
+	for i, arg := range os.Args {
+		if i == 0 {
+			continue // skip the tool name
+		}
+		var m [][]byte
+		if m = extParamRegex.FindSubmatch([]byte(arg)); m != nil {
+			flagName := string(m[1])
+			flagValue := string(m[2])
+			switch flagName {
+			case "out_dir":
+				outDir = flagValue
+			default:
+				fmt.Printf("Unknown option: %s.\n%s\n", arg, usage)
+				os.Exit(-1)
+			}
+		} else if arg == "--extension" {
+			continue
+		} else if arg[0] == '-' {
+			fmt.Printf("Unknown option: %s.\n%s\n", arg, usage)
+			os.Exit(-1)
+		} else {
+			schemaFile = arg
+		}
+	}
+
+	if schemaFile == "" {
+		fmt.Printf("No input JSON schema specified.\n%s\n", usage)
+		os.Exit(-1)
+	}
+	if outDir == "" {
+		fmt.Printf("No output directory specified.\n%s\n", usage)
+		os.Exit(-1)
+	}
+	if !strings.HasPrefix(getBaseFileNameWithoutExt(schemaFile), "x-") {
+		fmt.Printf("Schema file name must start with 'x-'.\n%s\n", usage)
+		os.Exit(-1)
+	}
+
+	return GenerateExtension(schemaFile, outDir)
+}
diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-extension_test.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-extension_test.go
new file mode 100644
index 000000000..004a78611
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-extension_test.go
@@ -0,0 +1,52 @@
+package main
+
+import (
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"testing"
+)
+
+func TestErrorExtensionGeneratorUnsupportedPrimitive(t *testing.T) {
+	var err error
+
+	output, err := exec.Command(
+		"generator",
+		"--extension",
+		"test/x-unsupportedprimitives.json",
+		"--out_dir=/tmp",
+	).Output()
+
+	outputFile := "x-unsupportedprimitives.errors"
+	_ = ioutil.WriteFile(outputFile, output, 0644)
+	err = exec.Command("diff", outputFile, "test/errors/x-unsupportedprimitives.errors").Run()
+	if err != nil {
+		t.Logf("Diff failed: %+v", err)
+		t.FailNow()
+	} else {
+		// if the test succeeded, clean up
+		os.Remove(outputFile)
+	}
+}
+
+func TestErrorExtensionGeneratorNameCollision(t *testing.T) {
+	var err error
+
+	output, err := exec.Command(
+		"generator",
+		"--extension",
+		"test/x-extension-name-collision.json",
+		"--out_dir=/tmp",
+	).Output()
+
+	outputFile := "x-extension-name-collision.errors"
+	_ = ioutil.WriteFile(outputFile, output, 0644)
+	err = exec.Command("diff", outputFile, "test/errors/x-extension-name-collision.errors").Run()
+	if err != nil {
+		t.Logf("Diff failed: %+v", err)
+		t.FailNow()
+	} else {
+		// if the test succeeded, clean up
+		os.Remove(outputFile)
+	}
+}
diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-proto.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-proto.go
new file mode 100644
index 000000000..4741af6ad
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/generate-proto.go
@@ -0,0 +1,119 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/googleapis/gnostic/printer"
+)
+
+// ProtoOption represents an option to be added to generated .proto files.
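+// For example (illustrative), ProtoOption{Name: "java_package", Value: "org.openapi_v2"} is
+// rendered by generateProto below as: option java_package = "org.openapi_v2";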
+type ProtoOption struct { + Name string + Value string + Comment string +} + +func (domain *Domain) generateProto(packageName string, license string, options []ProtoOption, imports []string) string { + code := &printer.Code{} + code.Print(license) + code.Print("// THIS FILE IS AUTOMATICALLY GENERATED.") + code.Print() + + code.Print("syntax = \"proto3\";") + code.Print() + code.Print("package " + packageName + ";") + for _, importString := range imports { + code.Print() + code.Print("import \"" + importString + "\";") + } + code.Print() + + // generate option declarations + for _, option := range options { + commentLines := strings.Split(option.Comment, "\n") + for _, commentLine := range commentLines { + code.Print(commentLine) + } + line := "option " + option.Name + " = " + if option.Value == "true" || option.Value == "false" { + line += option.Value + } else { + line += "\"" + option.Value + "\"" + } + line += ";\n" + code.Print(line) + } + + // generate message definitions + typeNames := domain.sortedTypeNames() + for _, typeName := range typeNames { + typeModel := domain.TypeModels[typeName] + if typeModel.Description != "" { + code.Print("// %s", typeModel.Description) + } + code.Print("message %s {", typeName) + code.Indent() + if typeModel.OneOfWrapper { + code.Print("oneof oneof {") + code.Indent() + } + var fieldNumber = 0 + for _, propertyModel := range typeModel.Properties { + if propertyModel.Description != "" { + code.Print("// %s", propertyModel.Description) + } + propertyName := propertyModel.Name + fieldNumber++ + propertyType := propertyModel.Type + if propertyType == "int" { + propertyType = "int64" + } + if propertyType == "float" { + propertyType = "double" + } + + // TODO may be remove this. + if propertyType == "blob" { + propertyType = "string" + } + + var displayName = propertyName + if displayName == "$ref" { + displayName = "_ref" + } + if displayName == "$schema" { + displayName = "_schema" + } + displayName = camelCaseToSnakeCase(displayName) + + var line = fmt.Sprintf("%s %s = %d;", propertyType, displayName, fieldNumber) + if propertyModel.Repeated { + line = "repeated " + line + } + code.Print(line) + } + if typeModel.OneOfWrapper { + code.Outdent() + code.Print("}") + } + code.Outdent() + code.Print("}") + code.Print() + } + return code.String() +} diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/helpers.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/helpers.go new file mode 100644 index 000000000..221af2a44 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/helpers.go @@ -0,0 +1,55 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "strings" + "unicode" +) + +// Returns a "snake case" form of a camel-cased string. 
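+// For example (illustrative), "responseCode" becomes "response_code".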
+func camelCaseToSnakeCase(input string) string { + out := "" + for index, runeValue := range input { + //fmt.Printf("%#U starts at byte position %d\n", runeValue, index) + if runeValue >= 'A' && runeValue <= 'Z' { + if index > 0 { + out += "_" + } + out += string(runeValue - 'A' + 'a') + } else { + out += string(runeValue) + } + } + return out +} + +func snakeCaseToCamelCase(input string) string { + out := "" + + words := strings.Split(input, "_") + + for i, word := range words { + if (i > 0) && len(word) > 0 { + w := []rune(word) + w[0] = unicode.ToUpper(w[0]) + out += string(w) + } else { + out += word + } + } + + return out +} diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/main.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/main.go new file mode 100644 index 000000000..2eefa857d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/main.go @@ -0,0 +1,257 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// generator generates Protocol Buffer models and support code from +// JSON Schemas. It is used to generate representations of the +// OpenAPI Specification and vendor and specification extensions +// that are added by third-party OpenAPI authors. +package main + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "path" + "runtime" + "strings" + + "github.com/googleapis/gnostic/jsonschema" +) + +// License is the software license applied to generated code. +const License = "" + + "// Copyright 2017 Google Inc. All Rights Reserved.\n" + + "//\n" + + "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + + "// you may not use this file except in compliance with the License.\n" + + "// You may obtain a copy of the License at\n" + + "//\n" + + "// http://www.apache.org/licenses/LICENSE-2.0\n" + + "//\n" + + "// Unless required by applicable law or agreed to in writing, software\n" + + "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + + "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + + "// See the License for the specific language governing permissions and\n" + + "// limitations under the License.\n" + +func protoOptions(packageName string) []ProtoOption { + return []ProtoOption{ + ProtoOption{ + Name: "java_multiple_files", + Value: "true", + Comment: "// This option lets the proto compiler generate Java code inside the package\n" + + "// name (see below) instead of inside an outer class. It creates a simpler\n" + + "// developer experience by reducing one-level of name nesting and be\n" + + "// consistent with most programming languages that don't support outer classes.", + }, + + ProtoOption{ + Name: "java_outer_classname", + Value: "OpenAPIProto", + Comment: "// The Java outer classname should be the filename in UpperCamelCase. 
This\n" + + "// class is only used to hold proto descriptor, so developers don't need to\n" + + "// work with it directly.", + }, + + ProtoOption{ + Name: "java_package", + Value: "org." + packageName, + Comment: "// The Java package name must be proto package name with proper prefix.", + }, + + ProtoOption{ + Name: "objc_class_prefix", + Value: "OAS", + Comment: "// A reasonable prefix for the Objective-C symbols generated from the package.\n" + + "// It should at a minimum be 3 characters long, all uppercase, and convention\n" + + "// is to use an abbreviation of the package name. Something short, but\n" + + "// hopefully unique enough to not conflict with things that may come along in\n" + + "// the future. 'GPB' is reserved for the protocol buffer implementation itself.", + }, + } +} + +func generateOpenAPIModel(version string) error { + var input string + var filename string + var protoPackageName string + + switch version { + case "v2": + input = "openapi-2.0.json" + filename = "OpenAPIv2" + protoPackageName = "openapi.v2" + case "v3": + input = "openapi-3.0.json" + filename = "OpenAPIv3" + protoPackageName = "openapi.v3" + case "discovery": + input = "discovery.json" + filename = "discovery" + protoPackageName = "discovery.v1" + default: + return fmt.Errorf("Unknown OpenAPI version %s", version) + } + + goPackageName := strings.Replace(protoPackageName, ".", "_", -1) + + projectRoot := os.Getenv("GOPATH") + "/src/github.com/googleapis/gnostic/" + + baseSchema, err := jsonschema.NewSchemaFromFile(projectRoot + "jsonschema/schema.json") + if err != nil { + return err + } + baseSchema.ResolveRefs() + baseSchema.ResolveAllOfs() + + openapiSchema, err := jsonschema.NewSchemaFromFile(projectRoot + filename + "/" + input) + if err != nil { + return err + } + openapiSchema.ResolveRefs() + openapiSchema.ResolveAllOfs() + + // build a simplified model of the types described by the schema + cc := NewDomain(openapiSchema, version) + // generators will map these patterns to the associated property names + // these pattern names are a bit of a hack until we find a more automated way to obtain them + + switch version { + case "v2": + cc.TypeNameOverrides = map[string]string{ + "VendorExtension": "Any", + } + cc.PropertyNameOverrides = map[string]string{ + "PathItem": "Path", + "ResponseValue": "ResponseCode", + } + case "v3": + cc.TypeNameOverrides = map[string]string{ + "SpecificationExtension": "Any", + } + cc.PropertyNameOverrides = map[string]string{ + "PathItem": "Path", + "ResponseValue": "ResponseCode", + } + case "discovery": + cc.TypeNameOverrides = map[string]string{} + cc.PropertyNameOverrides = map[string]string{} + default: + return fmt.Errorf("Unknown OpenAPI version %s", version) + } + + err = cc.Build() + if err != nil { + return err + } + + if true { + log.Printf("Type Model:\n%s", cc.Description()) + } + + // ensure that the target directory exists + err = os.MkdirAll(projectRoot+filename, 0755) + if err != nil { + return err + } + + // generate the protocol buffer description + log.Printf("Generating protocol buffer description") + proto := cc.generateProto(protoPackageName, License, + protoOptions(goPackageName), []string{"google/protobuf/any.proto"}) + protoFileName := projectRoot + filename + "/" + filename + ".proto" + err = ioutil.WriteFile(protoFileName, []byte(proto), 0644) + if err != nil { + return err + } + + // generate the compiler + log.Printf("Generating compiler support code") + compiler := cc.GenerateCompiler(goPackageName, License, []string{ + "fmt", + 
"gopkg.in/yaml.v2", + "strings", + "regexp", + "github.com/googleapis/gnostic/compiler", + }) + goFileName := projectRoot + filename + "/" + filename + ".go" + err = ioutil.WriteFile(goFileName, []byte(compiler), 0644) + if err != nil { + return err + } + // format the compiler + log.Printf("Formatting compiler support code") + return exec.Command(runtime.GOROOT()+"/bin/gofmt", "-w", goFileName).Run() +} + +func usage() string { + return fmt.Sprintf(` +Usage: %s [OPTIONS] +Options: + --v2 + Generate Protocol Buffer representation and support code for OpenAPI v2. + Files are read from and written to appropriate locations in the gnostic + project directory. + --v3 + Generate Protocol Buffer representation and support code for OpenAPI v3 + Files are read from and written to appropriate locations in the gnostic + project directory. + --extension EXTENSION_SCHEMA [EXTENSIONOPTIONS] + Generate a gnostic extension that reads a set of OpenAPI extensions. + EXTENSION_SCHEMA is the json schema for the OpenAPI extensions to be + supported. + EXTENSION_OPTIONS + --out_dir=PATH: Location for writing extension models and support code. +`, path.Base(os.Args[0])) +} + +func main() { + var openapiVersion = "" + var generateExtensions = false + + for i, arg := range os.Args { + if i == 0 { + continue // skip the tool name + } + if arg == "--v2" { + openapiVersion = "v2" + } else if arg == "--v3" { + openapiVersion = "v3" + } else if arg == "--discovery" { + openapiVersion = "discovery" + } else if arg == "--extension" { + generateExtensions = true + break + } else { + fmt.Printf("Unknown option: %s.\n%s\n", arg, usage()) + os.Exit(-1) + } + } + + if openapiVersion != "" { + err := generateOpenAPIModel(openapiVersion) + if err != nil { + fmt.Printf("%+v\n", err) + } + } else if generateExtensions { + err := processExtensionGenCommandline(usage()) + if err != nil { + fmt.Printf("%+v\n", err) + } + } else { + fmt.Printf("%s\n", usage()) + } +} diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/test/errors/x-extension-name-collision.errors b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/errors/x-extension-name-collision.errors new file mode 100644 index 000000000..abea4309b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/errors/x-extension-name-collision.errors @@ -0,0 +1,2 @@ +Schema SampleCompanyTwoPrimitiveString and SampleCompanyOnePrimitiveString have the same 'id' field value. + diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/test/errors/x-unsupportedprimitives.errors b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/errors/x-unsupportedprimitives.errors new file mode 100644 index 000000000..721a5f22c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/errors/x-unsupportedprimitives.errors @@ -0,0 +1,2 @@ +Schema SampleCompanyOnePrimitiveString has type 'unsupportedtype' which is not supported. Supported primitive types are [boolean integer number string]. 
+ diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/test/x-extension-name-collision.json b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/x-extension-name-collision.json new file mode 100644 index 000000000..ed4ece5fa --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/x-extension-name-collision.json @@ -0,0 +1,12 @@ + { + "definitions": { + "SampleCompanyOnePrimitiveString": { + "type": "string", + "id": "x-samplecompanyone-mystr" + }, + "SampleCompanyTwoPrimitiveString": { + "type": "string", + "id": "x-samplecompanyone-mystr" + } + } + } \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/test/x-unsupportedprimitives.json b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/x-unsupportedprimitives.json new file mode 100644 index 000000000..612cd37b9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/test/x-unsupportedprimitives.json @@ -0,0 +1,8 @@ + { + "definitions": { + "SampleCompanyOnePrimitiveString": { + "type": "unsupportedtype", + "id": "x-samplecompanyone-mystr" + } + } + } \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/generate-gnostic/types.go b/vendor/github.com/googleapis/gnostic/generate-gnostic/types.go new file mode 100644 index 000000000..e0859f795 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/generate-gnostic/types.go @@ -0,0 +1,132 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "strings" + + "github.com/googleapis/gnostic/jsonschema" +) + +/// Type Modeling + +// TypeRequest models types that we encounter during model-building that have no named schema. +type TypeRequest struct { + Name string // name of type to be created + PropertyName string // name of a property that refers to this type + Schema *jsonschema.Schema // schema for type + OneOfWrapper bool // true if the type wraps "oneOfs" +} + +// NewTypeRequest creates a TypeRequest. +func NewTypeRequest(name string, propertyName string, schema *jsonschema.Schema) *TypeRequest { + return &TypeRequest{Name: name, PropertyName: propertyName, Schema: schema} +} + +// TypeProperty models type properties, eg. fields. +type TypeProperty struct { + Name string // name of property + Type string // type for property (scalar or message type) + StringEnumValues []string // possible values if this is an enumerated string type + MapType string // if this property is for a map, the name of the mapped type + Repeated bool // true if this property is repeated (an array) + Pattern string // if the property is a pattern property, names must match this pattern. 
+ Implicit bool // true if this property is implied by a pattern or "additional properties" property + Description string // if present, the "description" field in the schema +} + +func (typeProperty *TypeProperty) description() string { + result := "" + if typeProperty.Description != "" { + result += fmt.Sprintf("\t// %+s\n", typeProperty.Description) + } + if typeProperty.Repeated { + result += fmt.Sprintf("\t%s %s repeated %s\n", typeProperty.Name, typeProperty.Type, typeProperty.Pattern) + } else { + result += fmt.Sprintf("\t%s %s %s \n", typeProperty.Name, typeProperty.Type, typeProperty.Pattern) + } + return result +} + +// NewTypeProperty creates a TypeProperty +func NewTypeProperty() *TypeProperty { + return &TypeProperty{} +} + +// NewTypePropertyWithNameAndType creates a TypeProperty +func NewTypePropertyWithNameAndType(name string, typeName string) *TypeProperty { + return &TypeProperty{Name: name, Type: typeName} +} + +// NewTypePropertyWithNameTypeAndPattern creates a TypeProperty +func NewTypePropertyWithNameTypeAndPattern(name string, typeName string, pattern string) *TypeProperty { + return &TypeProperty{Name: name, Type: typeName, Pattern: pattern} +} + +// FieldName returns the message field name to use for a property. +func (typeProperty *TypeProperty) FieldName() string { + propertyName := typeProperty.Name + if propertyName == "$ref" { + return "XRef" + } + return strings.Title(snakeCaseToCamelCase(propertyName)) +} + +// TypeModel models types. +type TypeModel struct { + Name string // type name + Properties []*TypeProperty // slice of properties + Required []string // required property names + OneOfWrapper bool // true if this type wraps "oneof" properties + Open bool // open types can have keys outside the specified set + OpenPatterns []string // patterns for properties that we allow + IsStringArray bool // ugly override + IsItemArray bool // ugly override + IsBlob bool // ugly override + IsPair bool // type is a name-value pair used to support ordered maps + PairValueType string // type for pair values (valid if IsPair == true) + Description string // if present, the "description" field in the schema +} + +func (typeModel *TypeModel) addProperty(property *TypeProperty) { + if typeModel.Properties == nil { + typeModel.Properties = make([]*TypeProperty, 0) + } + typeModel.Properties = append(typeModel.Properties, property) +} + +func (typeModel *TypeModel) description() string { + result := "" + if typeModel.Description != "" { + result += fmt.Sprintf("// %+s\n", typeModel.Description) + } + var wrapperinfo string + if typeModel.OneOfWrapper { + wrapperinfo = " oneof wrapper" + } + result += fmt.Sprintf("%+s%s\n", typeModel.Name, wrapperinfo) + for _, property := range typeModel.Properties { + result += property.description() + } + return result +} + +// NewTypeModel creates a TypeModel. +func NewTypeModel() *TypeModel { + typeModel := &TypeModel{} + typeModel.Properties = make([]*TypeProperty, 0) + return typeModel +} diff --git a/vendor/github.com/googleapis/gnostic/gnostic.go b/vendor/github.com/googleapis/gnostic/gnostic.go new file mode 100644 index 000000000..80e050503 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/gnostic.go @@ -0,0 +1,550 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate ./COMPILE-PROTOS.sh + +// Gnostic is a tool for building better REST APIs through knowledge. +// +// Gnostic reads declarative descriptions of REST APIs that conform +// to the OpenAPI Specification, reports errors, resolves internal +// dependencies, and puts the results in a binary form that can +// be used in any language that is supported by the Protocol Buffer +// tools. +// +// Gnostic models are validated and typed. This allows API tool +// developers to focus on their product and not worry about input +// validation and type checking. +// +// Gnostic calls plugins that implement a variety of API implementation +// and support features including generation of client and server +// support code. +package main + +import ( + "bytes" + "errors" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + + "github.com/golang/protobuf/proto" + "github.com/googleapis/gnostic/OpenAPIv2" + "github.com/googleapis/gnostic/OpenAPIv3" + "github.com/googleapis/gnostic/compiler" + "github.com/googleapis/gnostic/discovery" + "github.com/googleapis/gnostic/jsonwriter" + plugins "github.com/googleapis/gnostic/plugins" + surface "github.com/googleapis/gnostic/surface" + "gopkg.in/yaml.v2" +) + +const ( // Source Format + SourceFormatUnknown = 0 + SourceFormatOpenAPI2 = 2 + SourceFormatOpenAPI3 = 3 + SourceFormatDiscovery = 4 +) + +// Determine the version of an OpenAPI description read from JSON or YAML. +func getOpenAPIVersionFromInfo(info interface{}) int { + m, ok := compiler.UnpackMap(info) + if !ok { + return SourceFormatUnknown + } + swagger, ok := compiler.MapValueForKey(m, "swagger").(string) + if ok && strings.HasPrefix(swagger, "2.0") { + return SourceFormatOpenAPI2 + } + openapi, ok := compiler.MapValueForKey(m, "openapi").(string) + if ok && strings.HasPrefix(openapi, "3.0") { + return SourceFormatOpenAPI3 + } + kind, ok := compiler.MapValueForKey(m, "kind").(string) + if ok && kind == "discovery#restDescription" { + return SourceFormatDiscovery + } + return SourceFormatUnknown +} + +const ( + pluginPrefix = "gnostic-" + extensionPrefix = "gnostic-x-" +) + +type pluginCall struct { + Name string + Invocation string +} + +// Invokes a plugin. +func (p *pluginCall) perform(document proto.Message, sourceFormat int, sourceName string) error { + if p.Name != "" { + request := &plugins.Request{} + + // Infer the name of the executable by adding the prefix. + executableName := pluginPrefix + p.Name + + // Validate invocation string with regular expression. + invocation := p.Invocation + + // + // Plugin invocations must consist of + // zero or more comma-separated key=value pairs followed by a path. + // If pairs are present, a colon separates them from the path. + // Keys and values must be alphanumeric strings and may contain + // dashes, underscores, periods, or forward slashes. + // A path can contain any characters other than the separators ',', ':', and '='. 
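+		// For example, the following are well-formed invocations (illustrative values):
+		//   "sample/"              - just an output path
+		//   "lang=go:sample/"      - one parameter followed by an output path
+		//   "a=b,c=123:/tmp/out"   - several parameters followed by an output path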
+ // + invocationRegex := regexp.MustCompile(`^([\w-_\/\.]+=[\w-_\/\.]+(,[\w-_\/\.]+=[\w-_\/\.]+)*:)?[^,:=]+$`) + if !invocationRegex.Match([]byte(p.Invocation)) { + return fmt.Errorf("Invalid invocation of %s: %s", executableName, invocation) + } + + invocationParts := strings.Split(p.Invocation, ":") + var outputLocation string + switch len(invocationParts) { + case 1: + outputLocation = invocationParts[0] + case 2: + parameters := strings.Split(invocationParts[0], ",") + for _, keyvalue := range parameters { + pair := strings.Split(keyvalue, "=") + if len(pair) == 2 { + request.Parameters = append(request.Parameters, &plugins.Parameter{Name: pair[0], Value: pair[1]}) + } + } + outputLocation = invocationParts[1] + default: + // badly-formed request + outputLocation = invocationParts[len(invocationParts)-1] + } + + version := &plugins.Version{} + version.Major = 0 + version.Minor = 1 + version.Patch = 0 + request.CompilerVersion = version + + request.OutputPath = outputLocation + + request.SourceName = sourceName + switch sourceFormat { + case SourceFormatOpenAPI2: + request.Openapi2 = document.(*openapi_v2.Document) + request.Surface, _ = surface.NewModelFromOpenAPI2(request.Openapi2) + case SourceFormatOpenAPI3: + request.Openapi3 = document.(*openapi_v3.Document) + request.Surface, _ = surface.NewModelFromOpenAPI3(request.Openapi3) + default: + } + + requestBytes, _ := proto.Marshal(request) + + cmd := exec.Command(executableName, "-plugin") + cmd.Stdin = bytes.NewReader(requestBytes) + cmd.Stderr = os.Stderr + output, err := cmd.Output() + if err != nil { + return err + } + response := &plugins.Response{} + err = proto.Unmarshal(output, response) + if err != nil { + return err + } + + plugins.HandleResponse(response, outputLocation) + } + return nil +} + +func isFile(path string) bool { + fileInfo, err := os.Stat(path) + if err != nil { + return false + } + return !fileInfo.IsDir() +} + +func isDirectory(path string) bool { + fileInfo, err := os.Stat(path) + if err != nil { + return false + } + return fileInfo.IsDir() +} + +// Write bytes to a named file. +// Certain names have special meaning: +// ! writes nothing +// - writes to stdout +// = writes to stderr +// If a directory name is given, the file is written there with +// a name derived from the source and extension arguments. +func writeFile(name string, bytes []byte, source string, extension string) { + var writer io.Writer + if name == "!" { + return + } else if name == "-" { + writer = os.Stdout + } else if name == "=" { + writer = os.Stderr + } else if isDirectory(name) { + base := filepath.Base(source) + // Remove the original source extension. + base = base[0 : len(base)-len(filepath.Ext(base))] + // Build the path that puts the result in the passed-in directory. + filename := name + "/" + base + "." + extension + file, _ := os.Create(filename) + defer file.Close() + writer = file + } else { + file, _ := os.Create(name) + defer file.Close() + writer = file + } + writer.Write(bytes) + if name == "-" || name == "=" { + writer.Write([]byte("\n")) + } +} + +// The Gnostic structure holds global state information for gnostic. +type Gnostic struct { + usage string + sourceName string + binaryOutputPath string + textOutputPath string + yamlOutputPath string + jsonOutputPath string + errorOutputPath string + resolveReferences bool + pluginCalls []*pluginCall + extensionHandlers []compiler.ExtensionHandler + sourceFormat int +} + +// Initialize a structure to store global application state. 
+func newGnostic() *Gnostic { + g := &Gnostic{} + // Option fields initialize to their default values. + g.usage = ` +Usage: gnostic OPENAPI_SOURCE [OPTIONS] + OPENAPI_SOURCE is the filename or URL of an OpenAPI description to read. +Options: + --pb-out=PATH Write a binary proto to the specified location. + --text-out=PATH Write a text proto to the specified location. + --json-out=PATH Write a json API description to the specified location. + --yaml-out=PATH Write a yaml API description to the specified location. + --errors-out=PATH Write compilation errors to the specified location. + --PLUGIN-out=PATH Run the plugin named gnostic_PLUGIN and write results + to the specified location. + --x-EXTENSION Use the extension named gnostic-x-EXTENSION + to process OpenAPI specification extensions. + --resolve-refs Explicitly resolve $ref references. + This could have problems with recursive definitions. +` + // Initialize internal structures. + g.pluginCalls = make([]*pluginCall, 0) + g.extensionHandlers = make([]compiler.ExtensionHandler, 0) + return g +} + +// Parse command-line options. +func (g *Gnostic) readOptions() { + // plugin processing matches patterns of the form "--PLUGIN-out=PATH" and "--PLUGIN_out=PATH" + pluginRegex := regexp.MustCompile("--(.+)[-_]out=(.+)") + + // extension processing matches patterns of the form "--x-EXTENSION" + extensionRegex := regexp.MustCompile("--x-(.+)") + + for i, arg := range os.Args { + if i == 0 { + continue // skip the tool name + } + var m [][]byte + if m = pluginRegex.FindSubmatch([]byte(arg)); m != nil { + pluginName := string(m[1]) + invocation := string(m[2]) + switch pluginName { + case "pb": + g.binaryOutputPath = invocation + case "text": + g.textOutputPath = invocation + case "json": + g.jsonOutputPath = invocation + case "yaml": + g.yamlOutputPath = invocation + case "errors": + g.errorOutputPath = invocation + default: + p := &pluginCall{Name: pluginName, Invocation: invocation} + g.pluginCalls = append(g.pluginCalls, p) + } + } else if m = extensionRegex.FindSubmatch([]byte(arg)); m != nil { + extensionName := string(m[1]) + extensionHandler := compiler.ExtensionHandler{Name: extensionPrefix + extensionName} + g.extensionHandlers = append(g.extensionHandlers, extensionHandler) + } else if arg == "--resolve-refs" { + g.resolveReferences = true + } else if arg[0] == '-' { + fmt.Fprintf(os.Stderr, "Unknown option: %s.\n%s\n", arg, g.usage) + os.Exit(-1) + } else { + g.sourceName = arg + } + } +} + +// Validate command-line options. +func (g *Gnostic) validateOptions() { + if g.binaryOutputPath == "" && + g.textOutputPath == "" && + g.yamlOutputPath == "" && + g.jsonOutputPath == "" && + g.errorOutputPath == "" && + len(g.pluginCalls) == 0 { + fmt.Fprintf(os.Stderr, "Missing output directives.\n%s\n", g.usage) + os.Exit(-1) + } + if g.sourceName == "" { + fmt.Fprintf(os.Stderr, "No input specified.\n%s\n", g.usage) + os.Exit(-1) + } + // If we get here and the error output is unspecified, write errors to stderr. + if g.errorOutputPath == "" { + g.errorOutputPath = "=" + } +} + +// Generate an error message to be written to stderr or a file. +func (g *Gnostic) errorBytes(err error) []byte { + return []byte("Errors reading " + g.sourceName + "\n" + err.Error()) +} + +// Read an OpenAPI description from YAML or JSON. +func (g *Gnostic) readOpenAPIText(bytes []byte) (message proto.Message, err error) { + info, err := compiler.ReadInfoFromBytes(g.sourceName, bytes) + if err != nil { + return nil, err + } + // Determine the OpenAPI version. 
+ g.sourceFormat = getOpenAPIVersionFromInfo(info) + if g.sourceFormat == SourceFormatUnknown { + return nil, errors.New("unable to identify OpenAPI version") + } + // Compile to the proto model. + if g.sourceFormat == SourceFormatOpenAPI2 { + document, err := openapi_v2.NewDocument(info, compiler.NewContextWithExtensions("$root", nil, &g.extensionHandlers)) + if err != nil { + return nil, err + } + message = document + } else if g.sourceFormat == SourceFormatOpenAPI3 { + document, err := openapi_v3.NewDocument(info, compiler.NewContextWithExtensions("$root", nil, &g.extensionHandlers)) + if err != nil { + return nil, err + } + message = document + } else { + document, err := discovery_v1.NewDocument(info, compiler.NewContextWithExtensions("$root", nil, &g.extensionHandlers)) + if err != nil { + return nil, err + } + message = document + } + return message, err +} + +// Read an OpenAPI binary file. +func (g *Gnostic) readOpenAPIBinary(data []byte) (message proto.Message, err error) { + // try to read an OpenAPI v3 document + documentV3 := &openapi_v3.Document{} + err = proto.Unmarshal(data, documentV3) + if err == nil && strings.HasPrefix(documentV3.Openapi, "3.0") { + g.sourceFormat = SourceFormatOpenAPI3 + return documentV3, nil + } + // if that failed, try to read an OpenAPI v2 document + documentV2 := &openapi_v2.Document{} + err = proto.Unmarshal(data, documentV2) + if err == nil && strings.HasPrefix(documentV2.Swagger, "2.0") { + g.sourceFormat = SourceFormatOpenAPI2 + return documentV2, nil + } + // if that failed, try to read a Discovery Format document + discoveryDocument := &discovery_v1.Document{} + err = proto.Unmarshal(data, discoveryDocument) + if err == nil { // && strings.HasPrefix(documentV2.Swagger, "2.0") { + g.sourceFormat = SourceFormatDiscovery + return discoveryDocument, nil + } + return nil, err +} + +// Write a binary pb representation. +func (g *Gnostic) writeBinaryOutput(message proto.Message) { + protoBytes, err := proto.Marshal(message) + if err != nil { + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + defer os.Exit(-1) + } else { + writeFile(g.binaryOutputPath, protoBytes, g.sourceName, "pb") + } +} + +// Write a text pb representation. +func (g *Gnostic) writeTextOutput(message proto.Message) { + bytes := []byte(proto.MarshalTextString(message)) + writeFile(g.textOutputPath, bytes, g.sourceName, "text") +} + +// Write JSON/YAML OpenAPI representations. +func (g *Gnostic) writeJSONYAMLOutput(message proto.Message) { + // Convert the OpenAPI document into an exportable MapSlice. + var rawInfo yaml.MapSlice + var ok bool + var err error + if g.sourceFormat == SourceFormatOpenAPI2 { + document := message.(*openapi_v2.Document) + rawInfo, ok = document.ToRawInfo().(yaml.MapSlice) + if !ok { + rawInfo = nil + } + } else if g.sourceFormat == SourceFormatOpenAPI3 { + document := message.(*openapi_v3.Document) + rawInfo, ok = document.ToRawInfo().(yaml.MapSlice) + if !ok { + rawInfo = nil + } + } else if g.sourceFormat == SourceFormatDiscovery { + document := message.(*discovery_v1.Document) + rawInfo, ok = document.ToRawInfo().(yaml.MapSlice) + if !ok { + rawInfo = nil + } + } + // Optionally write description in yaml format. 
+ if g.yamlOutputPath != "" { + var bytes []byte + if rawInfo != nil { + bytes, err = yaml.Marshal(rawInfo) + if err != nil { + fmt.Fprintf(os.Stderr, "Error generating yaml output %s\n", err.Error()) + } + writeFile(g.yamlOutputPath, bytes, g.sourceName, "yaml") + } else { + fmt.Fprintf(os.Stderr, "No yaml output available.\n") + } + } + // Optionally write description in json format. + if g.jsonOutputPath != "" { + var bytes []byte + if rawInfo != nil { + bytes, _ = jsonwriter.Marshal(rawInfo) + if err != nil { + fmt.Fprintf(os.Stderr, "Error generating json output %s\n", err.Error()) + } + writeFile(g.jsonOutputPath, bytes, g.sourceName, "json") + } else { + fmt.Fprintf(os.Stderr, "No json output available.\n") + } + } +} + +// Perform all actions specified in the command-line options. +func (g *Gnostic) performActions(message proto.Message) (err error) { + // Optionally resolve internal references. + if g.resolveReferences { + if g.sourceFormat == SourceFormatOpenAPI2 { + document := message.(*openapi_v2.Document) + _, err = document.ResolveReferences(g.sourceName) + } else if g.sourceFormat == SourceFormatOpenAPI3 { + document := message.(*openapi_v3.Document) + _, err = document.ResolveReferences(g.sourceName) + } + if err != nil { + return err + } + } + // Optionally write proto in binary format. + if g.binaryOutputPath != "" { + g.writeBinaryOutput(message) + } + // Optionally write proto in text format. + if g.textOutputPath != "" { + g.writeTextOutput(message) + } + // Optionaly write document in yaml and/or json formats. + if g.yamlOutputPath != "" || g.jsonOutputPath != "" { + g.writeJSONYAMLOutput(message) + } + // Call all specified plugins. + for _, p := range g.pluginCalls { + err := p.perform(message, g.sourceFormat, g.sourceName) + if err != nil { + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + defer os.Exit(-1) // run all plugins, even when some have errors + } + } + return nil +} + +func (g *Gnostic) main() { + var err error + g.readOptions() + g.validateOptions() + // Read the OpenAPI source. + bytes, err := compiler.ReadBytesForFile(g.sourceName) + if err != nil { + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + os.Exit(-1) + } + extension := strings.ToLower(filepath.Ext(g.sourceName)) + var message proto.Message + if extension == ".json" || extension == ".yaml" { + // Try to read the source as JSON/YAML. + message, err = g.readOpenAPIText(bytes) + if err != nil { + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + os.Exit(-1) + } + } else if extension == ".pb" { + // Try to read the source as a binary protocol buffer. + message, err = g.readOpenAPIBinary(bytes) + if err != nil { + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + os.Exit(-1) + } + } else { + err = errors.New("unknown file extension. 'json', 'yaml', and 'pb' are accepted") + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + os.Exit(-1) + } + // Perform actions specified by command options. 
+ err = g.performActions(message) + if err != nil { + writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors") + os.Exit(-1) + } +} + +func main() { + g := newGnostic() + g.main() +} diff --git a/vendor/github.com/googleapis/gnostic/gnostic_test.go b/vendor/github.com/googleapis/gnostic/gnostic_test.go new file mode 100644 index 000000000..22eaee827 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/gnostic_test.go @@ -0,0 +1,453 @@ +package main + +import ( + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "strings" + "testing" +) + +func testCompiler(t *testing.T, inputFile string, referenceFile string, expectErrors bool) { + textFile := strings.Replace(filepath.Base(inputFile), filepath.Ext(inputFile), ".text", 1) + errorsFile := strings.Replace(filepath.Base(inputFile), filepath.Ext(inputFile), ".errors", 1) + // remove any preexisting output files + os.Remove(textFile) + os.Remove(errorsFile) + // run the compiler + var err error + var cmd = exec.Command( + "gnostic", + inputFile, + "--text-out=.", + "--errors-out=.", + "--resolve-refs") + //t.Log(cmd.Args) + err = cmd.Run() + if err != nil && !expectErrors { + t.Logf("Compile failed: %+v", err) + t.FailNow() + } + // verify the output against a reference + var outputFile string + if expectErrors { + outputFile = errorsFile + } else { + outputFile = textFile + } + err = exec.Command("diff", outputFile, referenceFile).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } else { + // if the test succeeded, clean up + os.Remove(textFile) + os.Remove(errorsFile) + } +} + +func testNormal(t *testing.T, inputFile string, referenceFile string) { + testCompiler(t, inputFile, referenceFile, false) +} + +func testErrors(t *testing.T, inputFile string, referenceFile string) { + testCompiler(t, inputFile, referenceFile, true) +} + +func TestPetstoreJSON(t *testing.T) { + testNormal(t, + "examples/v2.0/json/petstore.json", + "test/v2.0/petstore.text") +} + +func TestPetstoreYAML(t *testing.T) { + testNormal(t, + "examples/v2.0/yaml/petstore.yaml", + "test/v2.0/petstore.text") +} + +func TestSeparateYAML(t *testing.T) { + testNormal(t, + "examples/v2.0/yaml/petstore-separate/spec/swagger.yaml", + "test/v2.0/yaml/petstore-separate/spec/swagger.text") +} + +func TestSeparateJSON(t *testing.T) { + testNormal(t, + "examples/v2.0/json/petstore-separate/spec/swagger.json", + "test/v2.0/yaml/petstore-separate/spec/swagger.text") // yaml and json results should be identical +} + +func TestRemotePetstoreJSON(t *testing.T) { + testNormal(t, + "https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/json/petstore.json", + "test/v2.0/petstore.text") +} + +func TestRemotePetstoreYAML(t *testing.T) { + testNormal(t, + "https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/yaml/petstore.yaml", + "test/v2.0/petstore.text") +} + +func TestRemoteSeparateYAML(t *testing.T) { + testNormal(t, + "https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/yaml/petstore-separate/spec/swagger.yaml", + "test/v2.0/yaml/petstore-separate/spec/swagger.text") +} + +func TestRemoteSeparateJSON(t *testing.T) { + testNormal(t, + "https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/json/petstore-separate/spec/swagger.json", + "test/v2.0/yaml/petstore-separate/spec/swagger.text") +} + +func TestErrorBadProperties(t *testing.T) { + testErrors(t, + "examples/errors/petstore-badproperties.yaml", + 
"test/errors/petstore-badproperties.errors") +} + +func TestErrorUnresolvedRefs(t *testing.T) { + testErrors(t, + "examples/errors/petstore-unresolvedrefs.yaml", + "test/errors/petstore-unresolvedrefs.errors") +} + +func TestErrorMissingVersion(t *testing.T) { + testErrors(t, + "examples/errors/petstore-missingversion.yaml", + "test/errors/petstore-missingversion.errors") +} + +func testPlugin(t *testing.T, plugin string, inputFile string, outputFile string, referenceFile string) { + // remove any preexisting output files + os.Remove(outputFile) + // run the compiler + var err error + output, err := exec.Command( + "gnostic", + "--"+plugin+"-out=-", + inputFile).Output() + if err != nil { + t.Logf("Compile failed: %+v", err) + t.FailNow() + } + _ = ioutil.WriteFile(outputFile, output, 0644) + err = exec.Command("diff", outputFile, referenceFile).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } else { + // if the test succeeded, clean up + os.Remove(outputFile) + } +} + +func TestSamplePluginWithPetstore(t *testing.T) { + testPlugin(t, + "summary", + "examples/v2.0/yaml/petstore.yaml", + "sample-petstore.out", + "test/v2.0/yaml/sample-petstore.out") +} + +func TestErrorInvalidPluginInvocations(t *testing.T) { + var err error + output, err := exec.Command( + "gnostic", + "examples/v2.0/yaml/petstore.yaml", + "--errors-out=-", + "--plugin-out=foo=bar,:abc", + "--plugin-out=,foo=bar:abc", + "--plugin-out=foo=:abc", + "--plugin-out==bar:abc", + "--plugin-out=,,:abc", + "--plugin-out=foo=bar=baz:abc", + ).Output() + if err == nil { + t.Logf("Invalid invocations were accepted") + t.FailNow() + } + outputFile := "invalid-plugin-invocation.errors" + _ = ioutil.WriteFile(outputFile, output, 0644) + err = exec.Command("diff", outputFile, "test/errors/invalid-plugin-invocation.errors").Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } else { + // if the test succeeded, clean up + os.Remove(outputFile) + } +} + +func TestValidPluginInvocations(t *testing.T) { + var err error + output, err := exec.Command( + "gnostic", + "examples/v2.0/yaml/petstore.yaml", + "--errors-out=-", + // verify an invocation with no parameters + "--summary-out=!", // "!" indicates that no output should be generated + // verify single pair of parameters + "--summary-out=a=b:!", + // verify multiple parameters + "--summary-out=a=b,c=123,xyz=alphabetagammadelta:!", + // verify that special characters / . 
- _ can be included in parameter keys and values + "--summary-out=a/b/c=x/y/z:!", + "--summary-out=a.b.c=x.y.z:!", + "--summary-out=a-b-c=x-y-z:!", + "--summary-out=a_b_c=x_y_z:!", + ).Output() + if len(output) != 0 { + t.Logf("Valid invocations generated invalid errors\n%s", string(output)) + t.FailNow() + } + if err != nil { + t.Logf("Valid invocations were not accepted") + t.FailNow() + } +} + +func TestExtensionHandlerWithLibraryExample(t *testing.T) { + outputFile := "library-example-with-ext.text.out" + inputFile := "test/library-example-with-ext.json" + referenceFile := "test/library-example-with-ext.text.out" + + os.Remove(outputFile) + // run the compiler + var err error + + command := exec.Command( + "gnostic", + "--x-sampleone", + "--x-sampletwo", + "--text-out="+outputFile, + "--resolve-refs", + inputFile) + + _, err = command.Output() + if err != nil { + t.Logf("Compile failed for command %v: %+v", command, err) + t.FailNow() + } + //_ = ioutil.WriteFile(outputFile, output, 0644) + err = exec.Command("diff", outputFile, referenceFile).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } else { + // if the test succeeded, clean up + os.Remove(outputFile) + } +} + +func TestJSONOutput(t *testing.T) { + inputFile := "test/library-example-with-ext.json" + + textFile := "sample.text" + jsonFile := "sample.json" + textFile2 := "sample2.text" + jsonFile2 := "sample2.json" + + os.Remove(textFile) + os.Remove(jsonFile) + os.Remove(textFile2) + os.Remove(jsonFile2) + + var err error + + // Run the compiler once. + command := exec.Command( + "gnostic", + "--text-out="+textFile, + "--json-out="+jsonFile, + inputFile) + _, err = command.Output() + if err != nil { + t.Logf("Compile failed for command %v: %+v", command, err) + t.FailNow() + } + + // Run the compiler again, this time on the generated output. + command = exec.Command( + "gnostic", + "--text-out="+textFile2, + "--json-out="+jsonFile2, + jsonFile) + _, err = command.Output() + if err != nil { + t.Logf("Compile failed for command %v: %+v", command, err) + t.FailNow() + } + + // Verify that both models have the same internal representation. + err = exec.Command("diff", textFile, textFile2).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } else { + // if the test succeeded, clean up + os.Remove(textFile) + os.Remove(jsonFile) + os.Remove(textFile2) + os.Remove(jsonFile2) + } +} + +func TestYAMLOutput(t *testing.T) { + inputFile := "test/library-example-with-ext.json" + + textFile := "sample.text" + yamlFile := "sample.yaml" + textFile2 := "sample2.text" + yamlFile2 := "sample2.yaml" + + os.Remove(textFile) + os.Remove(yamlFile) + os.Remove(textFile2) + os.Remove(yamlFile2) + + var err error + + // Run the compiler once. + command := exec.Command( + "gnostic", + "--text-out="+textFile, + "--yaml-out="+yamlFile, + inputFile) + _, err = command.Output() + if err != nil { + t.Logf("Compile failed for command %v: %+v", command, err) + t.FailNow() + } + + // Run the compiler again, this time on the generated output. + command = exec.Command( + "gnostic", + "--text-out="+textFile2, + "--yaml-out="+yamlFile2, + yamlFile) + _, err = command.Output() + if err != nil { + t.Logf("Compile failed for command %v: %+v", command, err) + t.FailNow() + } + + // Verify that both models have the same internal representation. 
+ err = exec.Command("diff", textFile, textFile2).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } else { + // if the test succeeded, clean up + os.Remove(textFile) + os.Remove(yamlFile) + os.Remove(textFile2) + os.Remove(yamlFile2) + } +} + +func testBuilder(version string, t *testing.T) { + var err error + + pbFile := "petstore-" + version + ".pb" + yamlFile := "petstore.yaml" + jsonFile := "petstore.json" + textFile := "petstore.text" + textReference := "test/" + version + ".0/petstore.text" + + os.Remove(pbFile) + os.Remove(textFile) + os.Remove(yamlFile) + os.Remove(jsonFile) + + // Generate petstore.pb. + command := exec.Command( + "petstore-builder", + "--"+version) + _, err = command.Output() + if err != nil { + t.Logf("Command %v failed: %+v", command, err) + t.FailNow() + } + + // Convert petstore.pb to yaml and json. + command = exec.Command( + "gnostic", + pbFile, + "--json-out="+jsonFile, + "--yaml-out="+yamlFile) + _, err = command.Output() + if err != nil { + t.Logf("Command %v failed: %+v", command, err) + t.FailNow() + } + + // Read petstore.yaml, resolve references, and export text. + command = exec.Command( + "gnostic", + yamlFile, + "--resolve-refs", + "--text-out="+textFile) + _, err = command.Output() + if err != nil { + t.Logf("Command %v failed: %+v", command, err) + t.FailNow() + } + + // Verify that the generated text matches our reference. + err = exec.Command("diff", textFile, textReference).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } + + // Read petstore.json, resolve references, and export text. + command = exec.Command( + "gnostic", + jsonFile, + "--resolve-refs", + "--text-out="+textFile) + _, err = command.Output() + if err != nil { + t.Logf("Command %v failed: %+v", command, err) + t.FailNow() + } + + // Verify that the generated text matches our reference. + err = exec.Command("diff", textFile, textReference).Run() + if err != nil { + t.Logf("Diff failed: %+v", err) + t.FailNow() + } + + // if the test succeeded, clean up + os.Remove(pbFile) + os.Remove(textFile) + os.Remove(yamlFile) + os.Remove(jsonFile) +} + +func TestBuilderV2(t *testing.T) { + testBuilder("v2", t) +} + +func TestBuilderV3(t *testing.T) { + testBuilder("v3", t) +} + +// OpenAPI 3.0 tests + +func TestPetstoreYAML_30(t *testing.T) { + testNormal(t, + "examples/v3.0/yaml/petstore.yaml", + "test/v3.0/petstore.text") +} + +func TestPetstoreJSON_30(t *testing.T) { + testNormal(t, + "examples/v3.0/json/petstore.json", + "test/v3.0/petstore.text") +} diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/README.md b/vendor/github.com/googleapis/gnostic/jsonschema/README.md new file mode 100644 index 000000000..c0dbe4a93 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/README.md @@ -0,0 +1,3 @@ +# jsonschema + +This directory contains code for reading, writing, and manipulating JSON schemas. \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/display.go b/vendor/github.com/googleapis/gnostic/jsonschema/display.go new file mode 100644 index 000000000..9b5c5611f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/display.go @@ -0,0 +1,229 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + "strings" +) + +// +// DISPLAY +// The following methods display Schemas. +// + +// Description returns a string representation of a string or string array. +func (s *StringOrStringArray) Description() string { + if s.String != nil { + return *s.String + } + if s.StringArray != nil { + return strings.Join(*s.StringArray, ", ") + } + return "" +} + +// Returns a string representation of a Schema. +func (schema *Schema) String() string { + return schema.describeSchema("") +} + +// Helper: Returns a string representation of a Schema indented by a specified string. +func (schema *Schema) describeSchema(indent string) string { + result := "" + if schema.Schema != nil { + result += indent + "$schema: " + *(schema.Schema) + "\n" + } + if schema.ID != nil { + result += indent + "id: " + *(schema.ID) + "\n" + } + if schema.MultipleOf != nil { + result += indent + fmt.Sprintf("multipleOf: %+v\n", *(schema.MultipleOf)) + } + if schema.Maximum != nil { + result += indent + fmt.Sprintf("maximum: %+v\n", *(schema.Maximum)) + } + if schema.ExclusiveMaximum != nil { + result += indent + fmt.Sprintf("exclusiveMaximum: %+v\n", *(schema.ExclusiveMaximum)) + } + if schema.Minimum != nil { + result += indent + fmt.Sprintf("minimum: %+v\n", *(schema.Minimum)) + } + if schema.ExclusiveMinimum != nil { + result += indent + fmt.Sprintf("exclusiveMinimum: %+v\n", *(schema.ExclusiveMinimum)) + } + if schema.MaxLength != nil { + result += indent + fmt.Sprintf("maxLength: %+v\n", *(schema.MaxLength)) + } + if schema.MinLength != nil { + result += indent + fmt.Sprintf("minLength: %+v\n", *(schema.MinLength)) + } + if schema.Pattern != nil { + result += indent + fmt.Sprintf("pattern: %+v\n", *(schema.Pattern)) + } + if schema.AdditionalItems != nil { + s := schema.AdditionalItems.Schema + if s != nil { + result += indent + "additionalItems:\n" + result += s.describeSchema(indent + " ") + } else { + b := *(schema.AdditionalItems.Boolean) + result += indent + fmt.Sprintf("additionalItems: %+v\n", b) + } + } + if schema.Items != nil { + result += indent + "items:\n" + items := schema.Items + if items.SchemaArray != nil { + for i, s := range *(items.SchemaArray) { + result += indent + " " + fmt.Sprintf("%d", i) + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } else if items.Schema != nil { + result += items.Schema.describeSchema(indent + " " + " ") + } + } + if schema.MaxItems != nil { + result += indent + fmt.Sprintf("maxItems: %+v\n", *(schema.MaxItems)) + } + if schema.MinItems != nil { + result += indent + fmt.Sprintf("minItems: %+v\n", *(schema.MinItems)) + } + if schema.UniqueItems != nil { + result += indent + fmt.Sprintf("uniqueItems: %+v\n", *(schema.UniqueItems)) + } + if schema.MaxProperties != nil { + result += indent + fmt.Sprintf("maxProperties: %+v\n", *(schema.MaxProperties)) + } + if schema.MinProperties != nil { + result += indent + fmt.Sprintf("minProperties: %+v\n", *(schema.MinProperties)) + } + if schema.Required != nil { + result += indent + fmt.Sprintf("required: %+v\n", *(schema.Required)) + } + if 
schema.AdditionalProperties != nil { + s := schema.AdditionalProperties.Schema + if s != nil { + result += indent + "additionalProperties:\n" + result += s.describeSchema(indent + " ") + } else { + b := *(schema.AdditionalProperties.Boolean) + result += indent + fmt.Sprintf("additionalProperties: %+v\n", b) + } + } + if schema.Properties != nil { + result += indent + "properties:\n" + for _, pair := range *(schema.Properties) { + name := pair.Name + s := pair.Value + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } + if schema.PatternProperties != nil { + result += indent + "patternProperties:\n" + for _, pair := range *(schema.PatternProperties) { + name := pair.Name + s := pair.Value + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } + if schema.Dependencies != nil { + result += indent + "dependencies:\n" + for _, pair := range *(schema.Dependencies) { + name := pair.Name + schemaOrStringArray := pair.Value + s := schemaOrStringArray.Schema + if s != nil { + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } else { + a := schemaOrStringArray.StringArray + if a != nil { + result += indent + " " + name + ":\n" + for _, s2 := range *a { + result += indent + " " + " " + s2 + "\n" + } + } + } + + } + } + if schema.Enumeration != nil { + result += indent + "enumeration:\n" + for _, value := range *(schema.Enumeration) { + if value.String != nil { + result += indent + " " + fmt.Sprintf("%+v\n", *value.String) + } else { + result += indent + " " + fmt.Sprintf("%+v\n", *value.Bool) + } + } + } + if schema.Type != nil { + result += indent + fmt.Sprintf("type: %+v\n", schema.Type.Description()) + } + if schema.AllOf != nil { + result += indent + "allOf:\n" + for _, s := range *(schema.AllOf) { + result += s.describeSchema(indent + " ") + result += indent + "-\n" + } + } + if schema.AnyOf != nil { + result += indent + "anyOf:\n" + for _, s := range *(schema.AnyOf) { + result += s.describeSchema(indent + " ") + result += indent + "-\n" + } + } + if schema.OneOf != nil { + result += indent + "oneOf:\n" + for _, s := range *(schema.OneOf) { + result += s.describeSchema(indent + " ") + result += indent + "-\n" + } + } + if schema.Not != nil { + result += indent + "not:\n" + result += schema.Not.describeSchema(indent + " ") + } + if schema.Definitions != nil { + result += indent + "definitions:\n" + for _, pair := range *(schema.Definitions) { + name := pair.Name + s := pair.Value + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } + if schema.Title != nil { + result += indent + "title: " + *(schema.Title) + "\n" + } + if schema.Description != nil { + result += indent + "description: " + *(schema.Description) + "\n" + } + if schema.Default != nil { + result += indent + "default:\n" + result += indent + fmt.Sprintf(" %+v\n", *(schema.Default)) + } + if schema.Format != nil { + result += indent + "format: " + *(schema.Format) + "\n" + } + if schema.Ref != nil { + result += indent + "$ref: " + *(schema.Ref) + "\n" + } + return result +} diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/models.go b/vendor/github.com/googleapis/gnostic/jsonschema/models.go new file mode 100644 index 000000000..67f1f9148 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/models.go @@ -0,0 +1,226 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package jsonschema supports the reading, writing, and manipulation +// of JSON Schemas. +package jsonschema + +// The Schema struct models a JSON Schema and, because schemas are +// defined hierarchically, contains many references to itself. +// All fields are pointers and are nil if the associated values +// are not specified. +type Schema struct { + Schema *string // $schema + ID *string // id keyword used for $ref resolution scope + Ref *string // $ref, i.e. JSON Pointers + + // http://json-schema.org/latest/json-schema-validation.html + // 5.1. Validation keywords for numeric instances (number and integer) + MultipleOf *SchemaNumber + Maximum *SchemaNumber + ExclusiveMaximum *bool + Minimum *SchemaNumber + ExclusiveMinimum *bool + + // 5.2. Validation keywords for strings + MaxLength *int64 + MinLength *int64 + Pattern *string + + // 5.3. Validation keywords for arrays + AdditionalItems *SchemaOrBoolean + Items *SchemaOrSchemaArray + MaxItems *int64 + MinItems *int64 + UniqueItems *bool + + // 5.4. Validation keywords for objects + MaxProperties *int64 + MinProperties *int64 + Required *[]string + AdditionalProperties *SchemaOrBoolean + Properties *[]*NamedSchema + PatternProperties *[]*NamedSchema + Dependencies *[]*NamedSchemaOrStringArray + + // 5.5. Validation keywords for any instance type + Enumeration *[]SchemaEnumValue + Type *StringOrStringArray + AllOf *[]*Schema + AnyOf *[]*Schema + OneOf *[]*Schema + Not *Schema + Definitions *[]*NamedSchema + + // 6. Metadata keywords + Title *string + Description *string + Default *interface{} + + // 7. Semantic validation with "format" + Format *string +} + +// These helper structs represent "combination" types that generally can +// have values of one type or another. All are used to represent parts +// of Schemas. + +// SchemaNumber represents a value that can be either an Integer or a Float. +type SchemaNumber struct { + Integer *int64 + Float *float64 +} + +// NewSchemaNumberWithInteger creates and returns a new object +func NewSchemaNumberWithInteger(i int64) *SchemaNumber { + result := &SchemaNumber{} + result.Integer = &i + return result +} + +// NewSchemaNumberWithFloat creates and returns a new object +func NewSchemaNumberWithFloat(f float64) *SchemaNumber { + result := &SchemaNumber{} + result.Float = &f + return result +} + +// SchemaOrBoolean represents a value that can be either a Schema or a Boolean. 
+type SchemaOrBoolean struct { + Schema *Schema + Boolean *bool +} + +// NewSchemaOrBooleanWithSchema creates and returns a new object +func NewSchemaOrBooleanWithSchema(s *Schema) *SchemaOrBoolean { + result := &SchemaOrBoolean{} + result.Schema = s + return result +} + +// NewSchemaOrBooleanWithBoolean creates and returns a new object +func NewSchemaOrBooleanWithBoolean(b bool) *SchemaOrBoolean { + result := &SchemaOrBoolean{} + result.Boolean = &b + return result +} + +// StringOrStringArray represents a value that can be either +// a String or an Array of Strings. +type StringOrStringArray struct { + String *string + StringArray *[]string +} + +// NewStringOrStringArrayWithString creates and returns a new object +func NewStringOrStringArrayWithString(s string) *StringOrStringArray { + result := &StringOrStringArray{} + result.String = &s + return result +} + +// NewStringOrStringArrayWithStringArray creates and returns a new object +func NewStringOrStringArrayWithStringArray(a []string) *StringOrStringArray { + result := &StringOrStringArray{} + result.StringArray = &a + return result +} + +// SchemaOrStringArray represents a value that can be either +// a Schema or an Array of Strings. +type SchemaOrStringArray struct { + Schema *Schema + StringArray *[]string +} + +// SchemaOrSchemaArray represents a value that can be either +// a Schema or an Array of Schemas. +type SchemaOrSchemaArray struct { + Schema *Schema + SchemaArray *[]*Schema +} + +// NewSchemaOrSchemaArrayWithSchema creates and returns a new object +func NewSchemaOrSchemaArrayWithSchema(s *Schema) *SchemaOrSchemaArray { + result := &SchemaOrSchemaArray{} + result.Schema = s + return result +} + +// NewSchemaOrSchemaArrayWithSchemaArray creates and returns a new object +func NewSchemaOrSchemaArrayWithSchemaArray(a []*Schema) *SchemaOrSchemaArray { + result := &SchemaOrSchemaArray{} + result.SchemaArray = &a + return result +} + +// SchemaEnumValue represents a value that can be part of an +// enumeration in a Schema. +type SchemaEnumValue struct { + String *string + Bool *bool +} + +// NamedSchema is a name-value pair that is used to emulate maps +// with ordered keys. +type NamedSchema struct { + Name string + Value *Schema +} + +// NewNamedSchema creates and returns a new object +func NewNamedSchema(name string, value *Schema) *NamedSchema { + return &NamedSchema{Name: name, Value: value} +} + +// NamedSchemaOrStringArray is a name-value pair that is used +// to emulate maps with ordered keys. +type NamedSchemaOrStringArray struct { + Name string + Value *SchemaOrStringArray +} + +// Access named subschemas by name + +func namedSchemaArrayElementWithName(array *[]*NamedSchema, name string) *Schema { + if array == nil { + return nil + } + for _, pair := range *array { + if pair.Name == name { + return pair.Value + } + } + return nil +} + +// PropertyWithName returns the selected element. +func (s *Schema) PropertyWithName(name string) *Schema { + return namedSchemaArrayElementWithName(s.Properties, name) +} + +// PatternPropertyWithName returns the selected element. +func (s *Schema) PatternPropertyWithName(name string) *Schema { + return namedSchemaArrayElementWithName(s.PatternProperties, name) +} + +// DefinitionWithName returns the selected element. +func (s *Schema) DefinitionWithName(name string) *Schema { + return namedSchemaArrayElementWithName(s.Definitions, name) +} + +// AddProperty adds a named property. 
+func (s *Schema) AddProperty(name string, property *Schema) { + *s.Properties = append(*s.Properties, NewNamedSchema(name, property)) +} diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/operations.go b/vendor/github.com/googleapis/gnostic/jsonschema/operations.go new file mode 100644 index 000000000..0ce2271e2 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/operations.go @@ -0,0 +1,394 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + "log" + "strings" +) + +// +// OPERATIONS +// The following methods perform operations on Schemas. +// + +// IsEmpty returns true if no members of the Schema are specified. +func (schema *Schema) IsEmpty() bool { + return (schema.Schema == nil) && + (schema.ID == nil) && + (schema.MultipleOf == nil) && + (schema.Maximum == nil) && + (schema.ExclusiveMaximum == nil) && + (schema.Minimum == nil) && + (schema.ExclusiveMinimum == nil) && + (schema.MaxLength == nil) && + (schema.MinLength == nil) && + (schema.Pattern == nil) && + (schema.AdditionalItems == nil) && + (schema.Items == nil) && + (schema.MaxItems == nil) && + (schema.MinItems == nil) && + (schema.UniqueItems == nil) && + (schema.MaxProperties == nil) && + (schema.MinProperties == nil) && + (schema.Required == nil) && + (schema.AdditionalProperties == nil) && + (schema.Properties == nil) && + (schema.PatternProperties == nil) && + (schema.Dependencies == nil) && + (schema.Enumeration == nil) && + (schema.Type == nil) && + (schema.AllOf == nil) && + (schema.AnyOf == nil) && + (schema.OneOf == nil) && + (schema.Not == nil) && + (schema.Definitions == nil) && + (schema.Title == nil) && + (schema.Description == nil) && + (schema.Default == nil) && + (schema.Format == nil) && + (schema.Ref == nil) +} + +// IsEqual returns true if two schemas are equal. +func (schema *Schema) IsEqual(schema2 *Schema) bool { + return schema.String() == schema2.String() +} + +// SchemaOperation represents a function that can be applied to a Schema. +type SchemaOperation func(schema *Schema, context string) + +// Applies a specified function to a Schema and all of the Schemas that it contains. 
+func (schema *Schema) applyToSchemas(operation SchemaOperation, context string) { + + if schema.AdditionalItems != nil { + s := schema.AdditionalItems.Schema + if s != nil { + s.applyToSchemas(operation, "AdditionalItems") + } + } + + if schema.Items != nil { + if schema.Items.SchemaArray != nil { + for _, s := range *(schema.Items.SchemaArray) { + s.applyToSchemas(operation, "Items.SchemaArray") + } + } else if schema.Items.Schema != nil { + schema.Items.Schema.applyToSchemas(operation, "Items.Schema") + } + } + + if schema.AdditionalProperties != nil { + s := schema.AdditionalProperties.Schema + if s != nil { + s.applyToSchemas(operation, "AdditionalProperties") + } + } + + if schema.Properties != nil { + for _, pair := range *(schema.Properties) { + s := pair.Value + s.applyToSchemas(operation, "Properties") + } + } + if schema.PatternProperties != nil { + for _, pair := range *(schema.PatternProperties) { + s := pair.Value + s.applyToSchemas(operation, "PatternProperties") + } + } + + if schema.Dependencies != nil { + for _, pair := range *(schema.Dependencies) { + schemaOrStringArray := pair.Value + s := schemaOrStringArray.Schema + if s != nil { + s.applyToSchemas(operation, "Dependencies") + } + } + } + + if schema.AllOf != nil { + for _, s := range *(schema.AllOf) { + s.applyToSchemas(operation, "AllOf") + } + } + if schema.AnyOf != nil { + for _, s := range *(schema.AnyOf) { + s.applyToSchemas(operation, "AnyOf") + } + } + if schema.OneOf != nil { + for _, s := range *(schema.OneOf) { + s.applyToSchemas(operation, "OneOf") + } + } + if schema.Not != nil { + schema.Not.applyToSchemas(operation, "Not") + } + + if schema.Definitions != nil { + for _, pair := range *(schema.Definitions) { + s := pair.Value + s.applyToSchemas(operation, "Definitions") + } + } + + operation(schema, context) +} + +// CopyProperties copies all non-nil properties from the source Schema to the schema Schema. 
+func (schema *Schema) CopyProperties(source *Schema) { + if source.Schema != nil { + schema.Schema = source.Schema + } + if source.ID != nil { + schema.ID = source.ID + } + if source.MultipleOf != nil { + schema.MultipleOf = source.MultipleOf + } + if source.Maximum != nil { + schema.Maximum = source.Maximum + } + if source.ExclusiveMaximum != nil { + schema.ExclusiveMaximum = source.ExclusiveMaximum + } + if source.Minimum != nil { + schema.Minimum = source.Minimum + } + if source.ExclusiveMinimum != nil { + schema.ExclusiveMinimum = source.ExclusiveMinimum + } + if source.MaxLength != nil { + schema.MaxLength = source.MaxLength + } + if source.MinLength != nil { + schema.MinLength = source.MinLength + } + if source.Pattern != nil { + schema.Pattern = source.Pattern + } + if source.AdditionalItems != nil { + schema.AdditionalItems = source.AdditionalItems + } + if source.Items != nil { + schema.Items = source.Items + } + if source.MaxItems != nil { + schema.MaxItems = source.MaxItems + } + if source.MinItems != nil { + schema.MinItems = source.MinItems + } + if source.UniqueItems != nil { + schema.UniqueItems = source.UniqueItems + } + if source.MaxProperties != nil { + schema.MaxProperties = source.MaxProperties + } + if source.MinProperties != nil { + schema.MinProperties = source.MinProperties + } + if source.Required != nil { + schema.Required = source.Required + } + if source.AdditionalProperties != nil { + schema.AdditionalProperties = source.AdditionalProperties + } + if source.Properties != nil { + schema.Properties = source.Properties + } + if source.PatternProperties != nil { + schema.PatternProperties = source.PatternProperties + } + if source.Dependencies != nil { + schema.Dependencies = source.Dependencies + } + if source.Enumeration != nil { + schema.Enumeration = source.Enumeration + } + if source.Type != nil { + schema.Type = source.Type + } + if source.AllOf != nil { + schema.AllOf = source.AllOf + } + if source.AnyOf != nil { + schema.AnyOf = source.AnyOf + } + if source.OneOf != nil { + schema.OneOf = source.OneOf + } + if source.Not != nil { + schema.Not = source.Not + } + if source.Definitions != nil { + schema.Definitions = source.Definitions + } + if source.Title != nil { + schema.Title = source.Title + } + if source.Description != nil { + schema.Description = source.Description + } + if source.Default != nil { + schema.Default = source.Default + } + if source.Format != nil { + schema.Format = source.Format + } + if source.Ref != nil { + schema.Ref = source.Ref + } +} + +// TypeIs returns true if the Type of a Schema includes the specified type +func (schema *Schema) TypeIs(typeName string) bool { + if schema.Type != nil { + // the schema Type is either a string or an array of strings + if schema.Type.String != nil { + return (*(schema.Type.String) == typeName) + } else if schema.Type.StringArray != nil { + for _, n := range *(schema.Type.StringArray) { + if n == typeName { + return true + } + } + } + } + return false +} + +// ResolveRefs resolves "$ref" elements in a Schema and its children. +// But if a reference refers to an object type, is inside a oneOf, or contains a oneOf, +// the reference is kept and we expect downstream tools to separately model these +// referenced schemas. 
+func (schema *Schema) ResolveRefs() { + rootSchema := schema + count := 1 + for count > 0 { + count = 0 + schema.applyToSchemas( + func(schema *Schema, context string) { + if schema.Ref != nil { + resolvedRef, err := rootSchema.resolveJSONPointer(*(schema.Ref)) + if err != nil { + log.Printf("%+v", err) + } else if resolvedRef.TypeIs("object") { + // don't substitute for objects, we'll model the referenced schema with a class + } else if context == "OneOf" { + // don't substitute for references inside oneOf declarations + } else if resolvedRef.OneOf != nil { + // don't substitute for references that contain oneOf declarations + } else if resolvedRef.AdditionalProperties != nil { + // don't substitute for references that look like objects + } else { + schema.Ref = nil + schema.CopyProperties(resolvedRef) + count++ + } + } + }, "") + } +} + +// resolveJSONPointer resolves JSON pointers. +// This current implementation is very crude and custom for OpenAPI 2.0 schemas. +// It panics for any pointer that it is unable to resolve. +func (schema *Schema) resolveJSONPointer(ref string) (result *Schema, err error) { + parts := strings.Split(ref, "#") + if len(parts) == 2 { + documentName := parts[0] + "#" + if documentName == "#" && schema.ID != nil { + documentName = *(schema.ID) + } + path := parts[1] + document := schemas[documentName] + pathParts := strings.Split(path, "/") + + // we currently do a very limited (hard-coded) resolution of certain paths and log errors for missed cases + if len(pathParts) == 1 { + return document, nil + } else if len(pathParts) == 3 { + switch pathParts[1] { + case "definitions": + dictionary := document.Definitions + for _, pair := range *dictionary { + if pair.Name == pathParts[2] { + result = pair.Value + } + } + case "properties": + dictionary := document.Properties + for _, pair := range *dictionary { + if pair.Name == pathParts[2] { + result = pair.Value + } + } + default: + break + } + } + } + if result == nil { + return nil, fmt.Errorf("unresolved pointer: %+v", ref) + } + return result, nil +} + +// ResolveAllOfs replaces "allOf" elements by merging their properties into the parent Schema. +func (schema *Schema) ResolveAllOfs() { + schema.applyToSchemas( + func(schema *Schema, context string) { + if schema.AllOf != nil { + for _, allOf := range *(schema.AllOf) { + schema.CopyProperties(allOf) + } + schema.AllOf = nil + } + }, "resolveAllOfs") +} + +// ResolveAnyOfs replaces all "anyOf" elements with "oneOf". 
+func (schema *Schema) ResolveAnyOfs() { + schema.applyToSchemas( + func(schema *Schema, context string) { + if schema.AnyOf != nil { + schema.OneOf = schema.AnyOf + schema.AnyOf = nil + } + }, "resolveAnyOfs") +} + +// return a pointer to a copy of a passed-in string +func stringptr(input string) (output *string) { + return &input +} + +// CopyOfficialSchemaProperty copies a named property from the official JSON Schema definition +func (schema *Schema) CopyOfficialSchemaProperty(name string) { + *schema.Properties = append(*schema.Properties, + NewNamedSchema(name, + &Schema{Ref: stringptr("http://json-schema.org/draft-04/schema#/properties/" + name)})) +} + +// CopyOfficialSchemaProperties copies named properties from the official JSON Schema definition +func (schema *Schema) CopyOfficialSchemaProperties(names []string) { + for _, name := range names { + schema.CopyOfficialSchemaProperty(name) + } +} diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/reader.go b/vendor/github.com/googleapis/gnostic/jsonschema/reader.go new file mode 100644 index 000000000..275fdaa16 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/reader.go @@ -0,0 +1,409 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + "io/ioutil" + + "gopkg.in/yaml.v2" +) + +// This is a global map of all known Schemas. +// It is initialized when the first Schema is created and inserted. +var schemas map[string]*Schema + +// NewSchemaFromFile reads a schema from a file. +// Currently this assumes that schemas are stored in the source distribution of this project. +func NewSchemaFromFile(filename string) (schema *Schema, err error) { + file, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + var info yaml.MapSlice + err = yaml.Unmarshal(file, &info) + if err != nil { + return nil, err + } + return NewSchemaFromObject(info), nil +} + +// NewSchemaFromObject constructs a schema from a parsed JSON object. +// Due to the complexity of the schema representation, this is a +// custom reader and not the standard Go JSON reader (encoding/json). 
+func NewSchemaFromObject(jsonData interface{}) *Schema { + switch t := jsonData.(type) { + default: + fmt.Printf("schemaValue: unexpected type %T\n", t) + return nil + case yaml.MapSlice: + schema := &Schema{} + for _, mapItem := range t { + k := mapItem.Key.(string) + v := mapItem.Value + + switch k { + case "$schema": + schema.Schema = schema.stringValue(v) + case "id": + schema.ID = schema.stringValue(v) + + case "multipleOf": + schema.MultipleOf = schema.numberValue(v) + case "maximum": + schema.Maximum = schema.numberValue(v) + case "exclusiveMaximum": + schema.ExclusiveMaximum = schema.boolValue(v) + case "minimum": + schema.Minimum = schema.numberValue(v) + case "exclusiveMinimum": + schema.ExclusiveMinimum = schema.boolValue(v) + + case "maxLength": + schema.MaxLength = schema.intValue(v) + case "minLength": + schema.MinLength = schema.intValue(v) + case "pattern": + schema.Pattern = schema.stringValue(v) + + case "additionalItems": + schema.AdditionalItems = schema.schemaOrBooleanValue(v) + case "items": + schema.Items = schema.schemaOrSchemaArrayValue(v) + case "maxItems": + schema.MaxItems = schema.intValue(v) + case "minItems": + schema.MinItems = schema.intValue(v) + case "uniqueItems": + schema.UniqueItems = schema.boolValue(v) + + case "maxProperties": + schema.MaxProperties = schema.intValue(v) + case "minProperties": + schema.MinProperties = schema.intValue(v) + case "required": + schema.Required = schema.arrayOfStringsValue(v) + case "additionalProperties": + schema.AdditionalProperties = schema.schemaOrBooleanValue(v) + case "properties": + schema.Properties = schema.mapOfSchemasValue(v) + case "patternProperties": + schema.PatternProperties = schema.mapOfSchemasValue(v) + case "dependencies": + schema.Dependencies = schema.mapOfSchemasOrStringArraysValue(v) + + case "enum": + schema.Enumeration = schema.arrayOfEnumValuesValue(v) + + case "type": + schema.Type = schema.stringOrStringArrayValue(v) + case "allOf": + schema.AllOf = schema.arrayOfSchemasValue(v) + case "anyOf": + schema.AnyOf = schema.arrayOfSchemasValue(v) + case "oneOf": + schema.OneOf = schema.arrayOfSchemasValue(v) + case "not": + schema.Not = NewSchemaFromObject(v) + case "definitions": + schema.Definitions = schema.mapOfSchemasValue(v) + + case "title": + schema.Title = schema.stringValue(v) + case "description": + schema.Description = schema.stringValue(v) + + case "default": + schema.Default = &v + + case "format": + schema.Format = schema.stringValue(v) + case "$ref": + schema.Ref = schema.stringValue(v) + default: + fmt.Printf("UNSUPPORTED (%s)\n", k) + } + } + + // insert schema in global map + if schema.ID != nil { + if schemas == nil { + schemas = make(map[string]*Schema, 0) + } + schemas[*(schema.ID)] = schema + } + return schema + } + return nil +} + +// +// BUILDERS +// The following methods build elements of Schemas from interface{} values. +// Each returns nil if it is unable to build the desired element. +// + +// Gets the string value of an interface{} value if possible. +func (schema *Schema) stringValue(v interface{}) *string { + switch v := v.(type) { + default: + fmt.Printf("stringValue: unexpected type %T\n", v) + case string: + return &v + } + return nil +} + +// Gets the numeric value of an interface{} value if possible. 
+func (schema *Schema) numberValue(v interface{}) *SchemaNumber { + number := &SchemaNumber{} + switch v := v.(type) { + default: + fmt.Printf("numberValue: unexpected type %T\n", v) + case float64: + v2 := float64(v) + number.Float = &v2 + return number + case float32: + v2 := float64(v) + number.Float = &v2 + return number + case int: + v2 := int64(v) + number.Integer = &v2 + } + return nil +} + +// Gets the integer value of an interface{} value if possible. +func (schema *Schema) intValue(v interface{}) *int64 { + switch v := v.(type) { + default: + fmt.Printf("intValue: unexpected type %T\n", v) + case float64: + v2 := int64(v) + return &v2 + case int64: + return &v + case int: + v2 := int64(v) + return &v2 + } + return nil +} + +// Gets the bool value of an interface{} value if possible. +func (schema *Schema) boolValue(v interface{}) *bool { + switch v := v.(type) { + default: + fmt.Printf("boolValue: unexpected type %T\n", v) + case bool: + return &v + } + return nil +} + +// Gets a map of Schemas from an interface{} value if possible. +func (schema *Schema) mapOfSchemasValue(v interface{}) *[]*NamedSchema { + switch v := v.(type) { + default: + fmt.Printf("mapOfSchemasValue: unexpected type %T\n", v) + case yaml.MapSlice: + m := make([]*NamedSchema, 0) + for _, mapItem := range v { + k2 := mapItem.Key.(string) + v2 := mapItem.Value + pair := &NamedSchema{Name: k2, Value: NewSchemaFromObject(v2)} + m = append(m, pair) + } + return &m + } + return nil +} + +// Gets an array of Schemas from an interface{} value if possible. +func (schema *Schema) arrayOfSchemasValue(v interface{}) *[]*Schema { + switch v := v.(type) { + default: + fmt.Printf("arrayOfSchemasValue: unexpected type %T\n", v) + case []interface{}: + m := make([]*Schema, 0) + for _, v2 := range v { + switch v2 := v2.(type) { + default: + fmt.Printf("arrayOfSchemasValue: unexpected type %T\n", v2) + case yaml.MapSlice: + s := NewSchemaFromObject(v2) + m = append(m, s) + } + } + return &m + case yaml.MapSlice: + m := make([]*Schema, 0) + s := NewSchemaFromObject(v) + m = append(m, s) + return &m + } + return nil +} + +// Gets a Schema or an array of Schemas from an interface{} value if possible. +func (schema *Schema) schemaOrSchemaArrayValue(v interface{}) *SchemaOrSchemaArray { + switch v := v.(type) { + default: + fmt.Printf("schemaOrSchemaArrayValue: unexpected type %T\n", v) + case []interface{}: + m := make([]*Schema, 0) + for _, v2 := range v { + switch v2 := v2.(type) { + default: + fmt.Printf("schemaOrSchemaArrayValue: unexpected type %T\n", v2) + case map[string]interface{}: + s := NewSchemaFromObject(v2) + m = append(m, s) + } + } + return &SchemaOrSchemaArray{SchemaArray: &m} + case yaml.MapSlice: + s := NewSchemaFromObject(v) + return &SchemaOrSchemaArray{Schema: s} + } + return nil +} + +// Gets an array of strings from an interface{} value if possible. +func (schema *Schema) arrayOfStringsValue(v interface{}) *[]string { + switch v := v.(type) { + default: + fmt.Printf("arrayOfStringsValue: unexpected type %T\n", v) + case []string: + return &v + case string: + a := []string{v} + return &a + case []interface{}: + a := make([]string, 0) + for _, v2 := range v { + switch v2 := v2.(type) { + default: + fmt.Printf("arrayOfStringsValue: unexpected type %T\n", v2) + case string: + a = append(a, v2) + } + } + return &a + } + return nil +} + +// Gets a string or an array of strings from an interface{} value if possible. 
+func (schema *Schema) stringOrStringArrayValue(v interface{}) *StringOrStringArray { + switch v := v.(type) { + default: + fmt.Printf("arrayOfStringsValue: unexpected type %T\n", v) + case []string: + s := &StringOrStringArray{} + s.StringArray = &v + return s + case string: + s := &StringOrStringArray{} + s.String = &v + return s + case []interface{}: + a := make([]string, 0) + for _, v2 := range v { + switch v2 := v2.(type) { + default: + fmt.Printf("arrayOfStringsValue: unexpected type %T\n", v2) + case string: + a = append(a, v2) + } + } + s := &StringOrStringArray{} + s.StringArray = &a + return s + } + return nil +} + +// Gets an array of enum values from an interface{} value if possible. +func (schema *Schema) arrayOfEnumValuesValue(v interface{}) *[]SchemaEnumValue { + a := make([]SchemaEnumValue, 0) + switch v := v.(type) { + default: + fmt.Printf("arrayOfEnumValuesValue: unexpected type %T\n", v) + case []interface{}: + for _, v2 := range v { + switch v2 := v2.(type) { + default: + fmt.Printf("arrayOfEnumValuesValue: unexpected type %T\n", v2) + case string: + a = append(a, SchemaEnumValue{String: &v2}) + case bool: + a = append(a, SchemaEnumValue{Bool: &v2}) + } + } + } + return &a +} + +// Gets a map of schemas or string arrays from an interface{} value if possible. +func (schema *Schema) mapOfSchemasOrStringArraysValue(v interface{}) *[]*NamedSchemaOrStringArray { + m := make([]*NamedSchemaOrStringArray, 0) + switch v := v.(type) { + default: + fmt.Printf("mapOfSchemasOrStringArraysValue: unexpected type %T %+v\n", v, v) + case yaml.MapSlice: + for _, mapItem := range v { + k2 := mapItem.Key.(string) + v2 := mapItem.Value + switch v2 := v2.(type) { + default: + fmt.Printf("mapOfSchemasOrStringArraysValue: unexpected type %T %+v\n", v2, v2) + case []interface{}: + a := make([]string, 0) + for _, v3 := range v2 { + switch v3 := v3.(type) { + default: + fmt.Printf("mapOfSchemasOrStringArraysValue: unexpected type %T %+v\n", v3, v3) + case string: + a = append(a, v3) + } + } + s := &SchemaOrStringArray{} + s.StringArray = &a + pair := &NamedSchemaOrStringArray{Name: k2, Value: s} + m = append(m, pair) + } + } + } + return &m +} + +// Gets a schema or a boolean value from an interface{} value if possible. 
+func (schema *Schema) schemaOrBooleanValue(v interface{}) *SchemaOrBoolean { + schemaOrBoolean := &SchemaOrBoolean{} + switch v := v.(type) { + case bool: + schemaOrBoolean.Boolean = &v + case yaml.MapSlice: + schemaOrBoolean.Schema = NewSchemaFromObject(v) + default: + fmt.Printf("schemaOrBooleanValue: unexpected type %T\n", v) + case []map[string]interface{}: + + } + return schemaOrBoolean +} diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/schema.json b/vendor/github.com/googleapis/gnostic/jsonschema/schema.json new file mode 100644 index 000000000..85eb502a6 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/schema.json @@ -0,0 +1,150 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "allOf": { "$ref": 
"#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/vendor/github.com/googleapis/gnostic/jsonschema/writer.go b/vendor/github.com/googleapis/gnostic/jsonschema/writer.go new file mode 100644 index 000000000..c82f3064b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonschema/writer.go @@ -0,0 +1,334 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + "gopkg.in/yaml.v2" +) + +const indentation = " " + +func renderMap(info interface{}, indent string) (result string) { + result = "{\n" + innerIndent := indent + indentation + switch pairs := info.(type) { + case yaml.MapSlice: + for i, pair := range pairs { + // first print the key + result += fmt.Sprintf("%s\"%+v\": ", innerIndent, pair.Key) + // then the value + switch value := pair.Value.(type) { + case string: + result += "\"" + value + "\"" + case bool: + if value { + result += "true" + } else { + result += "false" + } + case []interface{}: + result += renderArray(value, innerIndent) + case yaml.MapSlice: + result += renderMap(value, innerIndent) + case int: + result += fmt.Sprintf("%d", value) + case int64: + result += fmt.Sprintf("%d", value) + case []string: + result += renderStringArray(value, innerIndent) + default: + result += fmt.Sprintf("???MapItem(%+v, %T)", value, value) + } + if i < len(pairs)-1 { + result += "," + } + result += "\n" + } + default: + // t is some other type that we didn't name. 
+ } + + result += indent + "}" + return result +} + +func renderArray(array []interface{}, indent string) (result string) { + result = "[\n" + innerIndent := indent + indentation + for i, item := range array { + switch item := item.(type) { + case string: + result += innerIndent + "\"" + item + "\"" + case bool: + if item { + result += innerIndent + "true" + } else { + result += innerIndent + "false" + } + case yaml.MapSlice: + result += innerIndent + renderMap(item, innerIndent) + "" + default: + result += innerIndent + fmt.Sprintf("???ArrayItem(%+v)", item) + } + if i < len(array)-1 { + result += "," + } + result += "\n" + } + result += indent + "]" + return result +} + +func renderStringArray(array []string, indent string) (result string) { + result = "[\n" + innerIndent := indent + indentation + for i, item := range array { + result += innerIndent + "\"" + item + "\"" + if i < len(array)-1 { + result += "," + } + result += "\n" + } + result += indent + "]" + return result +} + +func render(info yaml.MapSlice) string { + return renderMap(info, "") + "\n" +} + +func (object *SchemaNumber) jsonValue() interface{} { + if object.Integer != nil { + return object.Integer + } else if object.Float != nil { + return object.Float + } else { + return nil + } +} + +func (object *SchemaOrBoolean) jsonValue() interface{} { + if object.Schema != nil { + return object.Schema.jsonValue() + } else if object.Boolean != nil { + return *object.Boolean + } else { + return nil + } +} + +func (object *StringOrStringArray) jsonValue() interface{} { + if object.String != nil { + return *object.String + } else if object.StringArray != nil { + array := make([]interface{}, 0) + for _, item := range *(object.StringArray) { + array = append(array, item) + } + return array + } else { + return nil + } +} + +func (object *SchemaOrStringArray) jsonValue() interface{} { + if object.Schema != nil { + return object.Schema.jsonValue() + } else if object.StringArray != nil { + array := make([]interface{}, 0) + for _, item := range *(object.StringArray) { + array = append(array, item) + } + return array + } else { + return nil + } +} + +func (object *SchemaOrSchemaArray) jsonValue() interface{} { + if object.Schema != nil { + return object.Schema.jsonValue() + } else if object.SchemaArray != nil { + array := make([]interface{}, 0) + for _, item := range *(object.SchemaArray) { + array = append(array, item.jsonValue()) + } + return array + } else { + return nil + } +} + +func (object *SchemaEnumValue) jsonValue() interface{} { + if object.String != nil { + return *object.String + } else if object.Bool != nil { + return *object.Bool + } else { + return nil + } +} + +func namedSchemaArrayValue(array *[]*NamedSchema) interface{} { + m2 := yaml.MapSlice{} + for _, pair := range *(array) { + var item2 yaml.MapItem + item2.Key = pair.Name + item2.Value = pair.Value.jsonValue() + m2 = append(m2, item2) + } + return m2 +} + +func namedSchemaOrStringArrayValue(array *[]*NamedSchemaOrStringArray) interface{} { + m2 := yaml.MapSlice{} + for _, pair := range *(array) { + var item2 yaml.MapItem + item2.Key = pair.Name + item2.Value = pair.Value.jsonValue() + m2 = append(m2, item2) + } + return m2 +} + +func schemaEnumArrayValue(array *[]SchemaEnumValue) []interface{} { + a := make([]interface{}, 0) + for _, item := range *array { + a = append(a, item.jsonValue()) + } + return a +} + +func schemaArrayValue(array *[]*Schema) []interface{} { + a := make([]interface{}, 0) + for _, item := range *array { + a = append(a, item.jsonValue()) + } + 
return a +} + +func (schema *Schema) jsonValue() yaml.MapSlice { + m := yaml.MapSlice{} + if schema.Title != nil { + m = append(m, yaml.MapItem{"title", *schema.Title}) + } + if schema.ID != nil { + m = append(m, yaml.MapItem{"id", *schema.ID}) + } + if schema.Schema != nil { + m = append(m, yaml.MapItem{"$schema", *schema.Schema}) + } + if schema.Type != nil { + m = append(m, yaml.MapItem{"type", schema.Type.jsonValue()}) + } + if schema.Items != nil { + m = append(m, yaml.MapItem{"items", schema.Items.jsonValue()}) + } + if schema.Description != nil { + m = append(m, yaml.MapItem{"description", *schema.Description}) + } + if schema.Required != nil { + m = append(m, yaml.MapItem{"required", *schema.Required}) + } + if schema.AdditionalProperties != nil { + m = append(m, yaml.MapItem{"additionalProperties", schema.AdditionalProperties.jsonValue()}) + } + if schema.PatternProperties != nil { + m = append(m, yaml.MapItem{"patternProperties", namedSchemaArrayValue(schema.PatternProperties)}) + } + if schema.Properties != nil { + m = append(m, yaml.MapItem{"properties", namedSchemaArrayValue(schema.Properties)}) + } + if schema.Dependencies != nil { + m = append(m, yaml.MapItem{"dependencies", namedSchemaOrStringArrayValue(schema.Dependencies)}) + } + if schema.Ref != nil { + m = append(m, yaml.MapItem{"$ref", *schema.Ref}) + } + if schema.MultipleOf != nil { + m = append(m, yaml.MapItem{"multipleOf", schema.MultipleOf.jsonValue()}) + } + if schema.Maximum != nil { + m = append(m, yaml.MapItem{"maximum", schema.Maximum.jsonValue()}) + } + if schema.ExclusiveMaximum != nil { + m = append(m, yaml.MapItem{"exclusiveMaximum", *schema.ExclusiveMaximum}) + } + if schema.Minimum != nil { + m = append(m, yaml.MapItem{"minimum", schema.Minimum.jsonValue()}) + } + if schema.ExclusiveMinimum != nil { + m = append(m, yaml.MapItem{"exclusiveMinimum", *schema.ExclusiveMinimum}) + } + if schema.MaxLength != nil { + m = append(m, yaml.MapItem{"maxLength", *schema.MaxLength}) + } + if schema.MinLength != nil { + m = append(m, yaml.MapItem{"minLength", *schema.MinLength}) + } + if schema.Pattern != nil { + m = append(m, yaml.MapItem{"pattern", *schema.Pattern}) + } + if schema.AdditionalItems != nil { + m = append(m, yaml.MapItem{"additionalItems", schema.AdditionalItems.jsonValue()}) + } + if schema.MaxItems != nil { + m = append(m, yaml.MapItem{"maxItems", *schema.MaxItems}) + } + if schema.MinItems != nil { + m = append(m, yaml.MapItem{"minItems", *schema.MinItems}) + } + if schema.UniqueItems != nil { + m = append(m, yaml.MapItem{"uniqueItems", *schema.UniqueItems}) + } + if schema.MaxProperties != nil { + m = append(m, yaml.MapItem{"maxProperties", *schema.MaxProperties}) + } + if schema.MinProperties != nil { + m = append(m, yaml.MapItem{"minProperties", *schema.MinProperties}) + } + if schema.Enumeration != nil { + m = append(m, yaml.MapItem{"enum", schemaEnumArrayValue(schema.Enumeration)}) + } + if schema.AllOf != nil { + m = append(m, yaml.MapItem{"allOf", schemaArrayValue(schema.AllOf)}) + } + if schema.AnyOf != nil { + m = append(m, yaml.MapItem{"anyOf", schemaArrayValue(schema.AnyOf)}) + } + if schema.OneOf != nil { + m = append(m, yaml.MapItem{"oneOf", schemaArrayValue(schema.OneOf)}) + } + if schema.Not != nil { + m = append(m, yaml.MapItem{"not", schema.Not.jsonValue()}) + } + if schema.Definitions != nil { + m = append(m, yaml.MapItem{"definitions", namedSchemaArrayValue(schema.Definitions)}) + } + if schema.Default != nil { + m = append(m, yaml.MapItem{"default", *schema.Default}) + } + if 
schema.Format != nil { + m = append(m, yaml.MapItem{"format", *schema.Format}) + } + return m +} + +// JSONString returns a json representation of a schema. +func (schema *Schema) JSONString() string { + info := schema.jsonValue() + return render(info) +} diff --git a/vendor/github.com/googleapis/gnostic/jsonwriter/README.md b/vendor/github.com/googleapis/gnostic/jsonwriter/README.md new file mode 100644 index 000000000..04700dc82 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonwriter/README.md @@ -0,0 +1,3 @@ +# jsonwriter + +This directory contains code for writing yaml.MapSlice structures as JSON files. diff --git a/vendor/github.com/googleapis/gnostic/jsonwriter/writer.go b/vendor/github.com/googleapis/gnostic/jsonwriter/writer.go new file mode 100644 index 000000000..c070cd686 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/jsonwriter/writer.go @@ -0,0 +1,168 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonwriter + +import ( + "bytes" + "errors" + "fmt" + "strings" + + "gopkg.in/yaml.v2" +) + +const indentation = " " + +// basic escaping, will need to be improved or replaced +func escape(s string) string { + s = strings.Replace(s, "\n", "\\n", -1) + s = strings.Replace(s, "\"", "\\\"", -1) + return s +} + +type writer struct { + b bytes.Buffer +} + +func (w *writer) bytes() []byte { + return w.b.Bytes() +} + +func (w *writer) writeString(s string) { + w.b.Write([]byte(s)) +} + +func (w *writer) writeMap(info interface{}, indent string) { + w.writeString("{\n") + innerIndent := indent + indentation + switch pairs := info.(type) { + case yaml.MapSlice: + for i, pair := range pairs { + // first print the key + w.writeString(fmt.Sprintf("%s\"%+v\": ", innerIndent, pair.Key)) + // then the value + switch value := pair.Value.(type) { + case string: + w.writeString("\"") + w.writeString(escape(value)) + w.writeString("\"") + case bool: + if value { + w.writeString("true") + } else { + w.writeString("false") + } + case []interface{}: + w.writeArray(value, innerIndent) + case yaml.MapSlice: + w.writeMap(value, innerIndent) + case int: + w.writeString(fmt.Sprintf("%d", value)) + case int64: + w.writeString(fmt.Sprintf("%d", value)) + case []string: + w.writeStringArray(value, innerIndent) + case float64: + w.writeString(fmt.Sprintf("%f", value)) + case []yaml.MapSlice: + w.writeMapSliceArray(value, innerIndent) + default: + w.writeString(fmt.Sprintf("???MapItem(%+v, %T)", value, value)) + } + if i < len(pairs)-1 { + w.writeString(",") + } + w.writeString("\n") + } + default: + // t is some other type that we didn't name. 
+ } + w.writeString(indent) + w.writeString("}") +} + +func (w *writer) writeArray(array []interface{}, indent string) { + w.writeString("[\n") + innerIndent := indent + indentation + for i, item := range array { + w.writeString(innerIndent) + switch item := item.(type) { + case string: + w.writeString("\"") + w.writeString(item) + w.writeString("\"") + case bool: + if item { + w.writeString("true") + } else { + w.writeString("false") + } + case yaml.MapSlice: + w.writeMap(item, innerIndent) + default: + w.writeString(fmt.Sprintf("???ArrayItem(%+v)", item)) + } + if i < len(array)-1 { + w.writeString(",") + } + w.writeString("\n") + } + w.writeString(indent) + w.writeString("]") +} + +func (w *writer) writeStringArray(array []string, indent string) { + w.writeString("[\n") + innerIndent := indent + indentation + for i, item := range array { + w.writeString(innerIndent) + w.writeString("\"") + w.writeString(escape(item)) + w.writeString("\"") + if i < len(array)-1 { + w.writeString(",") + } + w.writeString("\n") + } + w.writeString(indent) + w.writeString("]") +} + +func (w *writer) writeMapSliceArray(array []yaml.MapSlice, indent string) { + w.writeString("[\n") + innerIndent := indent + indentation + for i, item := range array { + w.writeString(innerIndent) + w.writeMap(item, innerIndent) + if i < len(array)-1 { + w.writeString(",") + } + w.writeString("\n") + } + w.writeString(indent) + w.writeString("]") +} + +// Marshal writes a yaml.MapSlice as JSON +func Marshal(in interface{}) (out []byte, err error) { + var w writer + m, ok := in.(yaml.MapSlice) + if !ok { + return nil, errors.New("invalid type passed to Marshal") + } + w.writeMap(m, "") + w.writeString("\n") + return w.bytes(), err +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/README.md b/vendor/github.com/googleapis/gnostic/plugins/README.md new file mode 100644 index 000000000..68cbc942b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/README.md @@ -0,0 +1,7 @@ +# Plugins + +This directory contains support code for building Gnostic plugins and associated examples. + +Plugins are used to process API descriptions and can perform tasks like documentation and +code generation. Plugins can be written in any language that is supported by the Protocol +Buffer tools. diff --git a/vendor/github.com/googleapis/gnostic/plugins/environment.go b/vendor/github.com/googleapis/gnostic/plugins/environment.go new file mode 100644 index 000000000..9a3bf9865 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/environment.go @@ -0,0 +1,189 @@ +package gnostic_plugin_v1 + +import ( + "flag" + "fmt" + "io" + "io/ioutil" + "log" + "os" + "path" + + "github.com/golang/protobuf/proto" + + openapiv2 "github.com/googleapis/gnostic/OpenAPIv2" + openapiv3 "github.com/googleapis/gnostic/OpenAPIv3" +) + +// Environment contains the environment of a plugin call. +type Environment struct { + Request *Request // plugin request object + Response *Response // response message + Invocation string // string representation of call + RunningAsPlugin bool // true if app is being run as a plugin +} + +// NewEnvironment creates a plugin context from arguments and standard input. 
+func NewEnvironment() (env *Environment, err error) { + env = &Environment{ + Invocation: os.Args[0], + Response: &Response{}, + } + + input := flag.String("input", "", "API description (in binary protocol buffer form)") + output := flag.String("output", "-", "Output file or directory") + plugin := flag.Bool("plugin", false, "Run as a gnostic plugin (other flags are ignored).") + flag.Parse() + + env.RunningAsPlugin = *plugin + programName := path.Base(os.Args[0]) + + if (*input == "") && !*plugin { + flag.Usage = func() { + fmt.Fprintf(os.Stderr, "\n") + fmt.Fprintf(os.Stderr, programName+" is a gnostic plugin.\n") + fmt.Fprintf(os.Stderr, ` +When it is run from gnostic, the -plugin option is specified and gnostic +writes a binary request to stdin and waits for a binary response on stdout. + +This program can also be run standalone using the other flags listed below. +When the -plugin option is specified, these flags are ignored.`) + fmt.Fprintf(os.Stderr, "\n\nUsage:\n") + flag.PrintDefaults() + } + flag.Usage() + os.Exit(0) + } + + if env.RunningAsPlugin { + // Handle invocation as a plugin. + + // Read the plugin input. + pluginData, err := ioutil.ReadAll(os.Stdin) + env.RespondAndExitIfError(err) + if len(pluginData) == 0 { + env.RespondAndExitIfError(fmt.Errorf("no input data")) + } + + // Deserialize the request from the input. + request := &Request{} + err = proto.Unmarshal(pluginData, request) + env.RespondAndExitIfError(err) + + // Collect parameters passed to the plugin. + parameters := request.Parameters + for _, parameter := range parameters { + env.Invocation += " " + parameter.Name + "=" + parameter.Value + } + + // Log the invocation. + log.Printf("Running plugin %s", env.Invocation) + + env.Request = request + + } else { + // Handle invocation from the command line. + + // Read the input document. + apiData, err := ioutil.ReadFile(*input) + if len(apiData) == 0 { + env.RespondAndExitIfError(fmt.Errorf("no input data")) + } + + env.Request = &Request{} + env.Request.OutputPath = *output + env.Request.SourceName = path.Base(*input) + + // First try to unmarshal OpenAPI v2. + documentv2 := &openapiv2.Document{} + err = proto.Unmarshal(apiData, documentv2) + if err == nil { + env.Request.Openapi2 = documentv2 + } else { + // ignore deserialization errors + } + + // Then try to unmarshal OpenAPI v3. + documentv3 := &openapiv3.Document{} + err = proto.Unmarshal(apiData, documentv3) + if err == nil { + env.Request.Openapi3 = documentv3 + } else { + // ignore deserialization errors + } + + } + return env, err +} + +// RespondAndExitIfError checks an error and if it is non-nil, records it and serializes and returns the response and then exits. +func (env *Environment) RespondAndExitIfError(err error) { + if err != nil { + env.Response.Errors = append(env.Response.Errors, err.Error()) + env.RespondAndExit() + } +} + +// RespondAndExit serializes and returns the plugin response and then exits. +func (env *Environment) RespondAndExit() { + if env.RunningAsPlugin { + responseBytes, _ := proto.Marshal(env.Response) + os.Stdout.Write(responseBytes) + } else { + err := HandleResponse(env.Response, env.Request.OutputPath) + if err != nil { + log.Printf("%s", err.Error()) + } + } + os.Exit(0) +} + +func HandleResponse(response *Response, outputLocation string) error { + if response.Errors != nil { + return fmt.Errorf("Plugin error: %+v", response.Errors) + } + + // Write files to the specified directory. + var writer io.Writer + switch { + case outputLocation == "!": + // Write nothing. 
+ case outputLocation == "-": + writer = os.Stdout + for _, file := range response.Files { + writer.Write([]byte("\n\n" + file.Name + " -------------------- \n")) + writer.Write(file.Data) + } + case isFile(outputLocation): + return fmt.Errorf("unable to overwrite %s", outputLocation) + default: // write files into a directory named by outputLocation + if !isDirectory(outputLocation) { + os.Mkdir(outputLocation, 0755) + } + for _, file := range response.Files { + p := outputLocation + "/" + file.Name + dir := path.Dir(p) + os.MkdirAll(dir, 0755) + f, _ := os.Create(p) + defer f.Close() + f.Write(file.Data) + } + } + return nil +} + +func isFile(path string) bool { + fileInfo, err := os.Stat(path) + if err != nil { + return false + } + return !fileInfo.IsDir() +} + +func isDirectory(path string) bool { + fileInfo, err := os.Stat(path) + if err != nil { + return false + } + return fileInfo.IsDir() +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/README.md new file mode 100644 index 000000000..f4567fe39 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/README.md @@ -0,0 +1,23 @@ +# gnostic-analyze + +This directory contains a `gnostic` plugin that analyzes an OpenAPI description for factors +that might influence code generation and other API automation. + +The plugin can be invoked like this: + + gnostic bookstore.json --analyze_out=. + +This will write analysis results to a file in the current directory. +Results are written to a file named `summary.json`. + +The plugin can be applied to a directory of descriptions using a command +like the following: + + find APIs -name "swagger.yaml" -exec gnostic --analyze_out=analysis {} \; + +This finds all `swagger.yaml` files in a directory named `APIs` and its subdirectories +and writes corresponding `summary.json` files into a directory named `analysis`. + +Results of multiple analysis runs can be gathered together and summarized +using the `summarize` program, which is in the `summarize` subdirectory. +Just run `summarize` in the same location as the `find` command shown above. diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/main.go new file mode 100644 index 000000000..09a86f7bd --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/main.go @@ -0,0 +1,82 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gnostic_analyze is a tool for analyzing OpenAPI descriptions. +// +// It scans an API description and evaluates properties +// that influence the ease and quality of code generation. +// - The number of HTTP operations of each method (GET, POST, etc). +// - The number of HTTP operations with no OperationId value. +// - The parameter types used and their frequencies. +// - The response types used and their frequencies. 
+// - The types used in definition objects and arrays and their frequencies. +// Results are returned in a JSON structure. +package main + +import ( + "encoding/json" + "os" + "path" + "strings" + + "github.com/golang/protobuf/proto" + plugins "github.com/googleapis/gnostic/plugins" + "github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics" +) + +// Record an error, then serialize and return a response. +func sendAndExitIfError(err error, response *plugins.Response) { + if err != nil { + response.Errors = append(response.Errors, err.Error()) + sendAndExit(response) + } +} + +// Serialize and return a response. +func sendAndExit(response *plugins.Response) { + responseBytes, _ := proto.Marshal(response) + os.Stdout.Write(responseBytes) + os.Exit(0) +} + +// This is the main function for the plugin. +func main() { + env, err := plugins.NewEnvironment() + env.RespondAndExitIfError(err) + + var stats *statistics.DocumentStatistics + if env.Request.Openapi2 != nil { + // Analyze the API document. + stats = statistics.NewDocumentStatistics(env.Request.SourceName, env.Request.Openapi2) + } + + if env.Request.Openapi3 != nil { + // Analyze the API document. + stats = statistics.NewDocumentStatisticsV3(env.Request.SourceName, env.Request.Openapi3) + } + + if stats != nil { + // Return the analysis results with an appropriate filename. + // Results are in files named "summary.json" in the same relative + // locations as the description source files. + file := &plugins.File{} + file.Name = strings.Replace(stats.Name, path.Base(stats.Name), "summary.json", -1) + file.Data, err = json.MarshalIndent(stats, "", " ") + file.Data = append(file.Data, []byte("\n")...) + env.RespondAndExitIfError(err) + env.Response.Files = append(env.Response.Files, file) + } + + env.RespondAndExit() +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics/statsv2.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics/statsv2.go new file mode 100644 index 000000000..e6d095790 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics/statsv2.go @@ -0,0 +1,331 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package statistics + +import ( + "fmt" + "strings" + + openapi "github.com/googleapis/gnostic/OpenAPIv2" +) + +// DocumentStatistics contains information collected about an API description. 
+type DocumentStatistics struct { + Name string `json:"name"` + Title string `json:"title"` + Operations map[string]int `json:"operations"` + DefinitionCount int `json:"definitions"` + ParameterTypes map[string]int `json:"parameterTypes"` + ResultTypes map[string]int `json:"resultTypes"` + DefinitionFieldTypes map[string]int `json:"definitionFieldTypes"` + DefinitionArrayTypes map[string]int `json:"definitionArrayTypes"` + DefinitionPrimitiveTypes map[string]int `json:"definitionPrimitiveTypes"` + AnonymousOperations []string `json:"anonymousOperations"` + AnonymousObjects []string `json:"anonymousObjects"` +} + +// NewDocumentStatistics builds a new DocumentStatistics object. +func NewDocumentStatistics(source string, document *openapi.Document) *DocumentStatistics { + s := &DocumentStatistics{} + s.Operations = make(map[string]int, 0) + s.ParameterTypes = make(map[string]int, 0) + s.ResultTypes = make(map[string]int, 0) + s.DefinitionFieldTypes = make(map[string]int, 0) + s.DefinitionArrayTypes = make(map[string]int, 0) + s.DefinitionPrimitiveTypes = make(map[string]int, 0) + s.AnonymousOperations = make([]string, 0) + s.AnonymousObjects = make([]string, 0) + s.analyzeDocument(source, document) + return s +} + +func (s *DocumentStatistics) addOperation(name string) { + s.Operations[name] = s.Operations[name] + 1 +} + +func (s *DocumentStatistics) addParameterType(path string, name string) { + if strings.Contains(name, "object") { + s.AnonymousObjects = append(s.AnonymousObjects, path) + } + s.ParameterTypes[name] = s.ParameterTypes[name] + 1 +} + +func (s *DocumentStatistics) addResultType(path string, name string) { + if strings.Contains(name, "object") { + s.AnonymousObjects = append(s.AnonymousObjects, path) + } + s.ResultTypes[name] = s.ResultTypes[name] + 1 +} + +func (s *DocumentStatistics) addDefinitionFieldType(path string, name string) { + if strings.Contains(name, "object") { + s.AnonymousObjects = append(s.AnonymousObjects, path) + } + s.DefinitionFieldTypes[name] = s.DefinitionFieldTypes[name] + 1 +} + +func (s *DocumentStatistics) addDefinitionArrayType(path string, name string) { + if strings.Contains(name, "object") { + s.AnonymousObjects = append(s.AnonymousObjects, path) + } + s.DefinitionArrayTypes[name] = s.DefinitionArrayTypes[name] + 1 +} + +func (s *DocumentStatistics) addDefinitionPrimitiveType(path string, name string) { + s.DefinitionPrimitiveTypes[name] = s.DefinitionPrimitiveTypes[name] + 1 +} + +func typeForPrimitivesItems(p *openapi.PrimitivesItems) string { + switch { + case p == nil: + return "object" + case p.Type != "": + return p.Type + case p.Items != nil && p.Items.Type != "": + return p.Items.Type + default: + return "object" + } +} + +func (s *DocumentStatistics) analyzeOperation(method string, path string, operation *openapi.Operation) { + s.addOperation(method) + s.addOperation("total") + if operation.OperationId == "" { + s.addOperation("anonymous") + s.AnonymousOperations = append(s.AnonymousOperations, path) + } + for _, parameter := range operation.Parameters { + p := parameter.GetParameter() + if p != nil { + b := p.GetBodyParameter() + if b != nil { + typeName := typeForSchema(b.Schema) + s.addParameterType(path+"/"+b.Name, typeName) + } + n := p.GetNonBodyParameter() + if n != nil { + hp := n.GetHeaderParameterSubSchema() + if hp != nil { + t := hp.Type + if t == "array" { + t += "-of-" + typeForPrimitivesItems(hp.Items) + } + s.addParameterType(path+"/"+hp.Name, t) + } + fp := n.GetFormDataParameterSubSchema() + if fp != nil { + t := 
fp.Type + if t == "array" { + t += "-of-" + typeForPrimitivesItems(fp.Items) + } + s.addParameterType(path+"/"+fp.Name, t) + } + qp := n.GetQueryParameterSubSchema() + if qp != nil { + t := qp.Type + if t == "array" { + t += "-of-" + typeForPrimitivesItems(qp.Items) + } + s.addParameterType(path+"/"+qp.Name, t) + } + pp := n.GetPathParameterSubSchema() + if pp != nil { + t := pp.Type + if t == "array" { + if t == "array" { + t += "-of-" + typeForPrimitivesItems(pp.Items) + } + } + s.addParameterType(path+"/"+pp.Name, t) + } + } + } + r := parameter.GetJsonReference() + if r != nil { + s.addParameterType(path+"/", "reference") + } + } + + for _, pair := range operation.Responses.ResponseCode { + value := pair.Value + response := value.GetResponse() + if response != nil { + responseSchema := response.Schema + responseSchemaSchema := responseSchema.GetSchema() + if responseSchemaSchema != nil { + s.addResultType(path+"/responses/"+pair.Name, typeForSchema(responseSchemaSchema)) + } + responseFileSchema := responseSchema.GetFileSchema() + if responseFileSchema != nil { + s.addResultType(path+"/responses/"+pair.Name, typeForFileSchema(responseFileSchema)) + } + } + ref := value.GetJsonReference() + if ref != nil { + } + } + +} + +// Analyze a definition in an OpenAPI description. +// Collect information about the definition type and any subsidiary types, +// such as the types of object fields or array elements. +func (s *DocumentStatistics) analyzeDefinition(path string, definition *openapi.Schema) { + s.DefinitionCount++ + typeName := typeNameForSchema(definition) + switch typeName { + case "object": + if definition.Properties != nil { + for _, pair := range definition.Properties.AdditionalProperties { + propertySchema := pair.Value + propertyType := typeForSchema(propertySchema) + s.addDefinitionFieldType(path+"/"+pair.Name, propertyType) + } + } + case "array": + s.addDefinitionArrayType(path+"/", typeForSchema(definition)) + default: // string, boolean, integer, number, null... + s.addDefinitionPrimitiveType(path+"/", typeName) + } +} + +// Analyze an OpenAPI description. +// Collect information about types used in the API. +// This should be called exactly once per DocumentStatistics object. +func (s *DocumentStatistics) analyzeDocument(source string, document *openapi.Document) { + s.Name = source + + s.Title = document.Info.Title + for _, pair := range document.Paths.Path { + path := pair.Value + if path.Get != nil { + s.analyzeOperation("get", "paths"+pair.Name+"/get", path.Get) + } + if path.Post != nil { + s.analyzeOperation("post", "paths"+pair.Name+"/post", path.Post) + } + if path.Put != nil { + s.analyzeOperation("put", "paths"+pair.Name+"/put", path.Put) + } + if path.Delete != nil { + s.analyzeOperation("delete", "paths"+pair.Name+"/delete", path.Delete) + } + } + if document.Definitions != nil { + for _, pair := range document.Definitions.AdditionalProperties { + definition := pair.Value + s.analyzeDefinition("definitions/"+pair.Name, definition) + } + } +} + +// helpers + +func typeNameForSchema(schema *openapi.Schema) string { + typeName := "object" // default type + if schema.Type != nil && len(schema.Type.Value) > 0 { + typeName = "" + for i, name := range schema.Type.Value { + if i > 0 { + typeName += "|" + } + typeName += name + } + } + return typeName +} + +// Return a type name to use for a schema. 
+func typeForSchema(schema *openapi.Schema) string { + if schema.XRef != "" { + return "reference" + } + if len(schema.Enum) > 0 { + enumType := typeNameForSchema(schema) + return "enum-of-" + enumType + } + typeName := typeNameForSchema(schema) + if typeName == "array" { + if schema.Items != nil { + // items contains an array of schemas + itemType := "" + for i, itemSchema := range schema.Items.Schema { + if i > 0 { + itemType += "|" + } + itemType += typeForSchema(itemSchema) + } + return "array-of-" + itemType + } else if schema.XRef != "" { + return "array-of-reference" + } else { + // we need to do more work to understand this type + return fmt.Sprintf("array-of-[%+v]", schema) + } + } else if typeName == "object" { + // this object might be representable with a map + // but not if it has properties + if (schema.Properties != nil) && (len(schema.Properties.AdditionalProperties) > 0) { + return typeName + } + if schema.AdditionalProperties != nil { + if schema.AdditionalProperties.GetSchema() != nil { + additionalPropertiesSchemaType := typeForSchema(schema.AdditionalProperties.GetSchema()) + return "map-of-" + additionalPropertiesSchemaType + } + if schema.AdditionalProperties.GetBoolean() == false { + // no additional properties are allowed, so we're not sure what to do if we get here... + return typeName + } + } + if schema.Items != nil { + itemType := "" + for i, itemSchema := range schema.Items.Schema { + if i > 0 { + itemType += "|" + } + itemType += typeForSchema(itemSchema) + } + return "map-of-" + itemType + } + return "map-of-object" + } else { + return typeName + } +} + +func typeForFileSchema(schema *openapi.FileSchema) string { + if schema.Type != "" { + value := schema.Type + switch value { + case "boolean": + return "fileschema-" + value + case "string": + return "fileschema-" + value + case "file": + return "fileschema-" + value + case "number": + return "fileschema-" + value + case "integer": + return "fileschema-" + value + case "object": + return "fileschema-" + value + case "null": + return "fileschema-" + value + } + } + return fmt.Sprintf("FILE SCHEMA %+v", schema) +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics/statsv3.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics/statsv3.go new file mode 100644 index 000000000..1634933d7 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics/statsv3.go @@ -0,0 +1,127 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package statistics + +import ( + openapi "github.com/googleapis/gnostic/OpenAPIv3" +) + +// NewDocumentStatistics builds a new DocumentStatistics object. 
+func NewDocumentStatisticsV3(source string, document *openapi.Document) *DocumentStatistics { + s := &DocumentStatistics{} + s.Operations = make(map[string]int, 0) + s.ParameterTypes = make(map[string]int, 0) + s.ResultTypes = make(map[string]int, 0) + s.DefinitionFieldTypes = make(map[string]int, 0) + s.DefinitionArrayTypes = make(map[string]int, 0) + s.DefinitionPrimitiveTypes = make(map[string]int, 0) + s.AnonymousOperations = make([]string, 0) + s.AnonymousObjects = make([]string, 0) + // TODO + //s.analyzeDocumentV3(source, document) + return s +} + +/* +func (s *DocumentStatistics) analyzeOperationV3(method string, path string, operation *openapi.Operation) { + s.addOperation(method) + s.addOperation("total") + if operation.OperationId == "" { + s.addOperation("anonymous") + s.AnonymousOperations = append(s.AnonymousOperations, path) + } + for _, parametersItem := range operation.Parameters { + p := parametersItem.GetParameter() + if p != nil { + typeName := typeNameForSchemaOrReferenceV3(p.Schema) + s.addParameterType(path+"/"+p.Name, typeName) + } + } + + for _, pair := range *(operation.Responses.Responses) { + value := pair.Value + response := value.GetResponse() + if response != nil { + responseSchema := response.Schema + responseSchemaSchema := responseSchema.GetSchema() + if responseSchemaSchema != nil { + s.addResultType(path+"/responses/"+pair.Name, typeForSchema(responseSchemaSchema)) + } + responseFileSchema := responseSchema.GetFileSchema() + if responseFileSchema != nil { + s.addResultType(path+"/responses/"+pair.Name, typeForFileSchema(responseFileSchema)) + } + } + ref := value.GetJsonReference() + if ref != nil { + } + } + +} + +// Analyze a definition in an OpenAPI description. +// Collect information about the definition type and any subsidiary types, +// such as the types of object fields or array elements. +func (s *DocumentStatistics) analyzeDefinitionV3(path string, definition *openapi.Schema) { + s.DefinitionCount++ + typeName := typeNameForSchemaV3(definition) + switch typeName { + case "object": + if definition.Properties != nil { + for _, pair := range definition.Properties.AdditionalProperties { + propertySchema := pair.Value + propertyType := typeForSchemaV3(propertySchema) + s.addDefinitionFieldType(path+"/"+pair.Name, propertyType) + } + } + case "array": + s.addDefinitionArrayType(path+"/", typeForSchemaV3(definition)) + default: // string, boolean, integer, number, null... + s.addDefinitionPrimitiveType(path+"/", typeName) + } +} + +// Analyze an OpenAPI description. +// Collect information about types used in the API. +// This should be called exactly once per DocumentStatistics object. 
+func (s *DocumentStatistics) analyzeDocumentV3(source string, document *openapi.Document) { + s.Name = source + + s.Title = document.Info.Title + for _, pair := range document.Paths.Path { + path := pair.Value + if path.Get != nil { + s.analyzeOperation("get", "paths"+pair.Name+"/get", path.Get) + } + if path.Post != nil { + s.analyzeOperation("post", "paths"+pair.Name+"/post", path.Post) + } + if path.Put != nil { + s.analyzeOperation("put", "paths"+pair.Name+"/put", path.Put) + } + if path.Delete != nil { + s.analyzeOperation("delete", "paths"+pair.Name+"/delete", path.Delete) + } + } + if document.Components.Schemas != nil { + for _, pair := range document.Components.Schemas.AdditionalProperties { + definition := pair.Value + if definition.GetSchema() != nil { + s.analyzeDefinition("definitions/"+pair.Name, definition.GetSchema()) + } + } + } +} +*/ diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/summarize/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/summarize/main.go new file mode 100644 index 000000000..69adad9ed --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-analyze/summarize/main.go @@ -0,0 +1,156 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// summarize is a tool for summarizing the results of gnostic_analyze runs. +package main + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + "sort" + + "github.com/googleapis/gnostic/plugins/gnostic-analyze/statistics" +) + +// Results are collected in this global slice. +var stats []statistics.DocumentStatistics + +// walker is called for each summary file found. +func walker(p string, info os.FileInfo, err error) error { + basename := path.Base(p) + if basename != "summary.json" { + return nil + } + data, err := ioutil.ReadFile(p) + if err != nil { + return err + } + var s statistics.DocumentStatistics + err = json.Unmarshal(data, &s) + if err != nil { + return err + } + stats = append(stats, s) + return nil +} + +func printFrequencies(m map[string]int) { + for _, pair := range rankByCount(m) { + fmt.Printf("%6d %s\n", pair.Value, pair.Key) + } +} + +func rankByCount(frequencies map[string]int) pairList { + pl := make(pairList, len(frequencies)) + i := 0 + for k, v := range frequencies { + pl[i] = pair{k, v} + i++ + } + sort.Sort(sort.Reverse(pl)) + return pl +} + +type pair struct { + Key string + Value int +} + +type pairList []pair + +func (p pairList) Len() int { return len(p) } +func (p pairList) Less(i, j int) bool { return p[i].Value < p[j].Value } +func (p pairList) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +func main() { + // Collect all statistics in the current directory and its subdirectories. + stats = make([]statistics.DocumentStatistics, 0) + filepath.Walk(".", walker) + + // Compute some interesting properties. 
+ apisWithAnonymousOperations := 0 + apisWithAnonymousObjects := 0 + apisWithAnonymousAnything := 0 + opFrequencies := make(map[string]int, 0) + parameterTypeFrequencies := make(map[string]int, 0) + resultTypeFrequencies := make(map[string]int, 0) + definitionFieldTypeFrequencies := make(map[string]int, 0) + definitionArrayTypeFrequencies := make(map[string]int, 0) + definitionPrimitiveTypeFrequencies := make(map[string]int, 0) + + for _, api := range stats { + if api.Operations["anonymous"] != 0 { + apisWithAnonymousOperations++ + } + if len(api.AnonymousObjects) > 0 { + apisWithAnonymousObjects++ + } + if len(api.AnonymousOperations) > 0 { + apisWithAnonymousAnything++ + if len(api.AnonymousObjects) > 0 { + fmt.Printf("%s has anonymous operations and objects\n", api.Name) + } else { + fmt.Printf("%s has anonymous operations\n", api.Name) + } + } else { + if len(api.AnonymousObjects) > 0 { + apisWithAnonymousAnything++ + fmt.Printf("%s has anonymous objects\n", api.Name) + } else { + fmt.Printf("%s has no anonymous operations or objects\n", api.Name) + } + } + for k, v := range api.Operations { + opFrequencies[k] += v + } + for k, v := range api.ParameterTypes { + parameterTypeFrequencies[k] += v + } + for k, v := range api.ResultTypes { + resultTypeFrequencies[k] += v + } + for k, v := range api.DefinitionFieldTypes { + definitionFieldTypeFrequencies[k] += v + } + for k, v := range api.DefinitionArrayTypes { + definitionArrayTypeFrequencies[k] += v + } + for k, v := range api.DefinitionPrimitiveTypes { + definitionPrimitiveTypeFrequencies[k] += v + } + } + + // Report the results. + fmt.Printf("\n") + fmt.Printf("Collected information on %d APIs.\n\n", len(stats)) + fmt.Printf("APIs with anonymous operations: %d\n", apisWithAnonymousOperations) + fmt.Printf("APIs with anonymous objects: %d\n", apisWithAnonymousObjects) + fmt.Printf("APIs with anonymous anything: %d\n", apisWithAnonymousAnything) + fmt.Printf("\nOperation frequencies:\n") + printFrequencies(opFrequencies) + fmt.Printf("\nParameter type frequencies:\n") + printFrequencies(parameterTypeFrequencies) + fmt.Printf("\nResult type frequencies:\n") + printFrequencies(resultTypeFrequencies) + fmt.Printf("\nDefinition object field type frequencies:\n") + printFrequencies(definitionFieldTypeFrequencies) + fmt.Printf("\nDefinition array type frequencies:\n") + printFrequencies(definitionArrayTypeFrequencies) + fmt.Printf("\nDefinition primitive type frequencies:\n") + printFrequencies(definitionPrimitiveTypeFrequencies) +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/Makefile new file mode 100644 index 000000000..f6c25026f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/Makefile @@ -0,0 +1,9 @@ + +build: + go get golang.org/x/tools/cmd/goimports + go install github.com/googleapis/gnostic + go install github.com/googleapis/gnostic/plugins/gnostic-go-generator + rm -f $(GOPATH)/bin/gnostic-go-client $(GOPATH)/bin/gnostic-go-server + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-client + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-server + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/README.md new file mode 100644 index 000000000..bb7142f72 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/README.md @@ 
-0,0 +1,18 @@ +# Go Generator Plugin + +This directory contains a `gnostic` plugin that can be used to generate a Go client library and scaffolding for a Go server for an API with an OpenAPI description. + +The plugin can be invoked like this: + + gnostic bookstore.json --go-generator-out=bookstore + +`bookstore` is the name of a directory where the generated code will be written. +`bookstore` will also be the package name used for generated code. + +By default, both client and server code will be generated. If the `gnostic-go-generator` binary is also linked from the names `gnostic-go-client` and `gnostic-go-server`, then only client or only server code can be generated as follows: + + gnostic bookstore.json --go-client-out=bookstore + + gnostic bookstore.json --go-server-out=bookstore + +For example usage, see the [examples/v2.0/bookstore](examples/v2.0/bookstore) directory. \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth/README.md new file mode 100644 index 000000000..e86755151 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth/README.md @@ -0,0 +1,31 @@ +# googleauth + +This directory contains support code that can be used to get an OAuth2 token for a Google API user. + +It is designed to work on computers with attached displays. +Use it to write command-line tools and test programs that call Google APIs. + +## Instructions + +Import this package and make the following call to request a token. + + client, err := googleauth.NewOAuth2Client(scopes) + +`scopes` should be a string containing the OAuth scopes needed by the APIs to be called. +For example, the URL Shortener API would require "https://www.googleapis.com/auth/urlshortener". + +This call will then open a local browser that will redirect to a Google signin page +with information about the app that is requesting a token. + +## Application Credentials + +To use this package, you need to download a "client secrets" file and +save it as `client_secrets.json` in the directory where your tool is run. + +To get this file, visit the {{ Google Cloud Console }}{{ https://cloud.google.com/console }} +and create a project. Then go to the API Manager to enable the APIs that you want to use +and create OAuth2 credentials. You'll then be able to download these credentials +as JSON. Save this file as `client_secrets.json` + +For more information about the `client_secrets.json` file format, please visit: +https://developers.google.com/api-client-library/python/guide/aaa_client_secrets \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth/googleauth.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth/googleauth.go new file mode 100644 index 000000000..e87d89d86 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth/googleauth.go @@ -0,0 +1,220 @@ +// +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package googleauth + +import ( + "encoding/json" + "errors" + "flag" + "fmt" + "io/ioutil" + "net" + "net/http" + "os" + "os/exec" + "path/filepath" + "runtime" + + "golang.org/x/net/context" + "golang.org/x/oauth2" +) + +const missingClientSecretsMessage = ` +Please configure OAuth 2.0 + +To make this sample run, you need to populate the client_secrets.json file +found at: + + %v + +with information from the {{ Google Cloud Console }} +{{ https://cloud.google.com/console }} + +For more information about the client_secrets.json file format, please visit: +https://developers.google.com/api-client-library/python/guide/aaa_client_secrets +` + +var ( + clientSecretsFile = flag.String("secrets", "client_secrets.json", "Client Secrets configuration") + cacheFile = flag.String("cache", "request.token", "Token cache file") +) + +// ClientConfig is a data structure definition for the client_secrets.json file. +// The code unmarshals the JSON configuration file into this structure. +type ClientConfig struct { + ClientID string `json:"client_id"` + ClientSecret string `json:"client_secret"` + RedirectURIs []string `json:"redirect_uris"` + AuthURI string `json:"auth_uri"` + TokenURI string `json:"token_uri"` +} + +// Config is a root-level configuration object. +type Config struct { + Installed ClientConfig `json:"installed"` + Web ClientConfig `json:"web"` +} + +// openURL opens a browser window to the specified location. +// This code originally appeared at: +// http://stackoverflow.com/questions/10377243/how-can-i-launch-a-process-that-is-not-a-file-in-go +func openURL(url string) error { + var err error + switch runtime.GOOS { + case "linux": + err = exec.Command("xdg-open", url).Start() + case "windows": + err = exec.Command("rundll32", "url.dll,FileProtocolHandler", url).Start() + case "darwin": + err = exec.Command("open", url).Start() + default: + err = fmt.Errorf("Cannot open URL %s on this platform", url) + } + return err +} + +// readConfig reads the configuration from clientSecretsFile. +// It returns an oauth configuration object for use with the Google API client.
+func readConfig(scopes []string) (*oauth2.Config, error) { + // Read the secrets file + data, err := ioutil.ReadFile(*clientSecretsFile) + if err != nil { + pwd, _ := os.Getwd() + fullPath := filepath.Join(pwd, *clientSecretsFile) + return nil, fmt.Errorf(missingClientSecretsMessage, fullPath) + } + + cfg := new(Config) + err = json.Unmarshal(data, &cfg) + if err != nil { + return nil, err + } + + var redirectURI string + if len(cfg.Web.RedirectURIs) > 0 { + redirectURI = cfg.Web.RedirectURIs[0] + } else if len(cfg.Installed.RedirectURIs) > 0 { + redirectURI = cfg.Installed.RedirectURIs[0] + } else { + return nil, errors.New("Must specify a redirect URI in config file or when creating OAuth client") + } + + return &oauth2.Config{ + ClientID: cfg.Installed.ClientID, + ClientSecret: cfg.Installed.ClientSecret, + Scopes: scopes, + Endpoint: oauth2.Endpoint{cfg.Installed.AuthURI, cfg.Installed.TokenURI}, + RedirectURL: redirectURI, + }, nil +} + +// startWebServer starts a web server that listens on http://localhost:8080. +// The webserver waits for an oauth code in the three-legged auth flow. +func startWebServer() (codeCh chan string, err error) { + listener, err := net.Listen("tcp", "localhost:8080") + if err != nil { + return nil, err + } + codeCh = make(chan string) + go http.Serve(listener, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + code := r.FormValue("code") + codeCh <- code // send code to OAuth flow + listener.Close() + w.Header().Set("Content-Type", "text/plain") + fmt.Fprintf(w, "Received code: %v\r\nYou can now safely close this browser window.", code) + })) + return codeCh, nil +} + +// NewOAuth2Client takes the user through the three-legged OAuth flow. +// It opens a browser in the native OS or outputs a URL, then blocks until +// the redirect completes to the /oauth2callback URI. +// It returns an instance of an HTTP client that can be passed to the +// constructor of an OAuth client. +// scopes is a variable number of OAuth scopes +func NewOAuth2Client(scopes ...string) (*http.Client, error) { + var ctx context.Context + tokenSource, err := NewOAuth2TokenSource(scopes...) + if err == nil { + return oauth2.NewClient(ctx, tokenSource), nil + } + return nil, err +} + +// NewOAuth2TokenSource takes the user through the three-legged OAuth flow. +// It opens a browser in the native OS or outputs a URL, then blocks until +// the redirect completes to the /oauth2callback URI. +// It returns an instance of an OAuth token source that can be passed to the +// constructor of an OAuth client. +// scopes is a variable number of OAuth scopes +func NewOAuth2TokenSource(scopes ...string) (oauth2.TokenSource, error) { + config, err := readConfig(scopes) + if err != nil { + msg := fmt.Sprintf("Cannot read configuration file: %v", err) + return nil, errors.New(msg) + } + + var ctx context.Context + + // Try to read the token from the cache file. + // If an error occurs, do the three-legged OAuth flow because + // the token is invalid or doesn't exist. + //token, err := config.TokenCache.Token() + + var token *oauth2.Token + + data, err := ioutil.ReadFile(*cacheFile) + if err == nil { + err = json.Unmarshal(data, &token) + } + if (err != nil) || !token.Valid() { + // Start web server. + // This is how this program receives the authorization code + // when the browser redirects. 
+ codeCh, err := startWebServer() + if err != nil { + return nil, err + } + + // Open url in browser + url := config.AuthCodeURL("") + err = openURL(url) + if err != nil { + fmt.Println("Visit the URL below to get a code.", + " This program will pause until the site is visted.") + } else { + fmt.Println("Your browser has been opened to an authorization URL.", + " This program will resume once authorization has been provided.\n") + } + fmt.Println(url) + + // Wait for the web server to get the code. + code := <-codeCh + + // This code caches the authorization code on the local + // filesystem, if necessary, as long as the TokenCache + // attribute in the config is set. + token, err = config.Exchange(ctx, code) + if err != nil { + return nil, err + } + + data, err := json.Marshal(token) + ioutil.WriteFile(*cacheFile, data, 0644) + } + return oauth2.StaticTokenSource(token), nil +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/Makefile new file mode 100644 index 000000000..d1c57accd --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/Makefile @@ -0,0 +1,4 @@ + +all: + gnostic swagger.yaml --go-client-out=apis_guru + go install diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/main.go new file mode 100644 index 000000000..2a2b35451 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/main.go @@ -0,0 +1,41 @@ +package main + +import ( + "fmt" + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/apis_guru" + "sort" +) + +func main() { + c := apis_guru.NewClient("http://api.apis.guru/v2") + + metrics, err := c.GetMetrics() + if err != nil { + panic(err) + } + fmt.Printf("%+v\n", metrics) + + apis, err := c.ListAPIs() + if err != nil { + panic(err) + } + + keys := make([]string, 0) + for key, _ := range *apis.OK { + keys = append(keys, key) + } + sort.Strings(keys) + + for _, key := range keys { + api := (*apis.OK)[key] + versions := make([]string, 0) + for key, _ := range api.Versions { + versions = append(versions, key) + } + sort.Strings(versions) + fmt.Printf("[%s]:%+v\n", key, versions) + } + + api := (*apis.OK)["xkcd.com"].Versions["1.0.0"] + fmt.Printf("%+v\n", api.SwaggerUrl) +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/swagger.yaml b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/swagger.yaml new file mode 100644 index 000000000..40e939966 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/apis_guru/swagger.yaml @@ -0,0 +1,186 @@ +swagger: '2.0' +schemes: + - https +host: api.apis.guru +basePath: /v2/ +info: + contact: + email: founders@apis.guru + name: APIs.guru + url: 'http://APIs.guru' + description: | + Wikipedia for Web APIs. Repository of API specs in OpenAPI(fka Swagger) 2.0 format. + + **Warning**: If you want to be notified about changes in advance please subscribe to our [Gitter channel](https://gitter.im/APIs-guru/api-models). 
+ + Client sample: [[Demo]](https://apis.guru/simple-ui) [[Repo]](https://github.com/APIs-guru/simple-ui) + license: + name: CC0 1.0 + url: 'https://github.com/APIs-guru/api-models#licenses' + title: APIs.guru + version: '2.0' + x-logo: + url: 'https://apis.guru/branding/logo_vertical.svg' +externalDocs: + url: 'https://github.com/APIs-guru/api-models/blob/master/API.md' +produces: + - application/json +security: [] +paths: + /list.json: + get: + description: | + List all APIs in the directory. + Returns links to OpenAPI specification for each API in the directory. + If API exist in multiply versions `preferred` one is explicitly marked. + + Some basic info from OpenAPI spec is cached inside each object. + This allows to generate some simple views without need to fetch OpenAPI spec for each API. + operationId: listAPIs + responses: + '200': + description: OK + schema: + $ref: '#/definitions/APIs' + summary: List all APIs + /metrics.json: + get: + description: | + Some basic metrics for the entire directory. + Just stunning numbers to put on a front page and are intended purely for WoW effect :) + operationId: getMetrics + responses: + '200': + description: OK + schema: + $ref: '#/definitions/Metrics' + summary: Get basic metrics +definitions: + API: + additionalProperties: false + description: Meta information about API + properties: + added: + description: Timestamp when the API was first added to the directory + format: date-time + type: string + preferred: + description: Recommended version + type: string + versions: + additionalProperties: + $ref: '#/definitions/ApiVersion' + description: List of supported versions of the API + minProperties: 1 + type: object + required: + - added + - preferred + - versions + type: object + APIs: + additionalProperties: + $ref: '#/definitions/API' + description: | + List of API details. + It is a JSON object with API IDs(`[:]`) as keys. 
+ example: + 'googleapis.com:drive': + added: '2015-02-22T20:00:45.000Z' + preferred: v3 + versions: + v2: + added: '2015-02-22T20:00:45.000Z' + info: + title: Drive + version: v2 + x-apiClientRegistration: + url: 'https://console.developers.google.com' + x-logo: + url: 'https://api.apis.guru/v2/cache/logo/https_www.gstatic.com_images_icons_material_product_2x_drive_32dp.png' + x-origin: + format: google + url: 'https://www.googleapis.com/discovery/v1/apis/drive/v2/rest' + version: v1 + x-preferred: false + x-providerName: googleapis.com + x-serviceName: drive + swaggerUrl: 'https://api.apis.guru/v2/specs/googleapis.com/drive/v2/swagger.json' + swaggerYamlUrl: 'https://api.apis.guru/v2/specs/googleapis.com/drive/v2/swagger.yaml' + updated: '2016-06-17T00:21:44.000Z' + v3: + added: '2015-12-12T00:25:13.000Z' + info: + title: Drive + version: v3 + x-apiClientRegistration: + url: 'https://console.developers.google.com' + x-logo: + url: 'https://api.apis.guru/v2/cache/logo/https_www.gstatic.com_images_icons_material_product_2x_drive_32dp.png' + x-origin: + format: google + url: 'https://www.googleapis.com/discovery/v1/apis/drive/v3/rest' + version: v1 + x-preferred: true + x-providerName: googleapis.com + x-serviceName: drive + swaggerUrl: 'https://api.apis.guru/v2/specs/googleapis.com/drive/v3/swagger.json' + swaggerYamlUrl: 'https://api.apis.guru/v2/specs/googleapis.com/drive/v3/swagger.yaml' + updated: '2016-06-17T00:21:44.000Z' + minProperties: 1 + type: object + ApiVersion: + additionalProperties: false + properties: + added: + description: Timestamp when the version was added + format: date-time + type: string + info: + description: Copy of `info` section from Swagger spec + minProperties: 1 + type: object + swaggerUrl: + description: URL to Swagger spec in JSON format + format: url + type: string + swaggerYamlUrl: + description: URL to Swagger spec in YAML format + format: url + type: string + updated: + description: Timestamp when the version was updated + format: date-time + type: string + required: + - added + - updated + - swaggerUrl + - swaggerYamlUrl + - info + type: object + Metrics: + additionalProperties: false + description: List of basic metrics + example: + numAPIs: 238 + numEndpoints: 6448 + numSpecs: 302 + properties: + numAPIs: + description: Number of APIs + minimum: 1 + type: integer + numEndpoints: + description: Total number of endpoints inside all specifications + minimum: 1 + type: integer + numSpecs: + description: Number of API specifications including different versions of the same API + minimum: 1 + type: integer + required: + - numSpecs + - numAPIs + - numEndpoints + type: object diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/Makefile new file mode 100644 index 000000000..6d04686a3 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/Makefile @@ -0,0 +1,20 @@ +build: + go get golang.org/x/tools/cmd/goimports + go install github.com/googleapis/gnostic + go install github.com/googleapis/gnostic/plugins/gnostic-go-generator + rm -f $(GOPATH)/bin/gnostic-go-client $(GOPATH)/bin/gnostic-go-server + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-client + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-server + +all: build + gnostic bookstore.json --go-generator-out=bookstore + +clean: + rm -rf bookstore bookstore.text 
service/service + +test: all + killall service; true # ignore errors due to no matching processes + cd service; go get .; go build; ./service & + go test + killall service + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/README.md new file mode 100644 index 000000000..a684b3af6 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/README.md @@ -0,0 +1,23 @@ +# Bookstore Example + +This directory contains an OpenAPI description of a simple bookstore API. + +Use this example to try the `gnostic-go-generator` plugin, which implements +`gnostic-go-client` and `gnostic-go-server` for generating API client and +server code, respectively. + +Run "make all" to build and install `gnostic` and the Go plugins. +It will generate both client and server code. The API client and +server code will be in the `bookstore` package. + +The `service` directory contains additional code that completes the server. +To build and run the service, `cd service` and do the following: + + go get . + go build + ./service & + +To test the service with the generated client, go back up to the top-level +directory and run `go test`. The test in `bookstore_test.go` uses client +code generated in `bookstore` to verify the service. + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore.json b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore.json new file mode 100644 index 000000000..98571c8a5 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore.json @@ -0,0 +1,357 @@ +{ + "swagger": "2.0", + "info": { + "description": "A simple Bookstore API example.", + "title": "Bookstore", + "version": "1.0.0" + }, + "host": "generated-bookstore.appspot.com", + "basePath": "/", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "https" + ], + "paths": { + "/shelves": { + "get": { + "description": "Return all shelves in the bookstore.", + "operationId": "listShelves", + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "List of shelves in the bookstore.", + "schema": { + "$ref": "#/definitions/listShelvesResponse" + } + } + }, + "security": [ + + ] + }, + "post": { + "description": "Create a new shelf in the bookstore.", + "operationId": "createShelf", + "parameters": [ + { + "description": "A shelf resource to create.", + "in": "body", + "name": "shelf", + "required": true, + "schema": { + "$ref": "#/definitions/shelf" + } + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A newly created shelf resource.", + "schema": { + "$ref": "#/definitions/shelf" + } + } + } + }, + "delete": { + "description": "Delete all shelves.", + "operationId": "deleteShelves", + "responses": { + "default": { + "description": "An empty response body." 
+ } + } + } + }, + "/shelves/{shelf}": { + "get": { + "description": "Get a single shelf resource with the given ID.", + "operationId": "getShelf", + "parameters": [ + { + "description": "ID of the shelf to get.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A shelf resource.", + "schema": { + "$ref": "#/definitions/shelf" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/error" + } + } + } + }, + "delete": { + "description": "Delete a single shelf with the given ID.", + "operationId": "deleteShelf", + "parameters": [ + { + "description": "ID of the shelf to delete.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + } + ], + "responses": { + "default": { + "description": "An empty response body." + } + } + } + }, + "/shelves/{shelf}/books": { + "get": { + "description": "Return all books in a shelf with the given ID.", + "operationId": "listBooks", + "parameters": [ + { + "description": "ID of the shelf whose books should be returned.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "List of books on the specified shelf.", + "schema": { + "$ref": "#/definitions/listBooksResponse" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/error" + } + } + } + }, + "post": { + "description": "Create a new book on the shelf.", + "operationId": "createBook", + "parameters": [ + { + "description": "ID of the shelf where the book should be created.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + }, + { + "description": "Book to create.", + "in": "body", + "name": "book", + "required": true, + "schema": { + "$ref": "#/definitions/book" + } + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A newly created book resource.", + "schema": { + "$ref": "#/definitions/book" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/error" + } + } + } + } + }, + "/shelves/{shelf}/books/{book}": { + "get": { + "description": "Get a single book with a given ID from a shelf.", + "operationId": "getBook", + "parameters": [ + { + "description": "ID of the shelf from which to get the book.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + }, + { + "description": "ID of the book to get from the shelf.", + "format": "int64", + "in": "path", + "name": "book", + "required": true, + "type": "integer" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A book resource.", + "schema": { + "$ref": "#/definitions/book" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/error" + } + } + } + }, + "delete": { + "description": "Delete a single book with a given ID from a shelf.", + "operationId": "deleteBook", + "parameters": [ + { + "description": "ID of the shelf from which to delete the book.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + }, + { + "description": "ID of the book to delete from the shelf.", + "format": "int64", + "in": "path", + "name": "book", + 
"required": true, + "type": "integer" + } + ], + "responses": { + "default": { + "description": "An empty response body." + } + } + } + } + }, + "definitions": { + "book": { + "properties": { + "author": { + "type": "string" + }, + "name": { + "type": "string" + }, + "title": { + "type": "string" + } + }, + "required": [ + "name", + "author", + "title" + ] + }, + "listBooksResponse": { + "properties": { + "books": { + "items": { + "$ref": "#/definitions/book" + }, + "type": "array" + } + }, + "required": [ + "books" + ], + "type": "object" + }, + "listShelvesResponse": { + "properties": { + "shelves": { + "items": { + "$ref": "#/definitions/shelf" + }, + "type": "array" + } + }, + "type": "object" + }, + "shelf": { + "properties": { + "name": { + "type": "string" + }, + "theme": { + "type": "string" + } + }, + "required": [ + "name", + "theme" + ] + }, + "error": { + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + }, + "security": [ + { + "api_key": [ + + ] + } + ], + "securityDefinitions": { + "api_key": { + "in": "query", + "name": "key", + "type": "apiKey" + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore_test.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore_test.go new file mode 100644 index 000000000..10506a892 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore_test.go @@ -0,0 +1,239 @@ +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package test + +import ( + "fmt" + "net/http" + "strings" + "testing" + + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore" +) + +const service = "http://localhost:8080" + +//const service = "http://generated-bookstore.appspot.com" + +func TestBookstore(t *testing.T) { + // create a client + b := bookstore.NewClient(service, nil) + // reset the service by deleting all shelves + { + err := b.DeleteShelves() + if err != nil { + t.Log("delete shelves failed") + t.Fail() + } + } + // verify that the service has no shelves + { + response, err := b.ListShelves() + if err != nil { + t.Log("list shelves failed") + t.Fail() + } + if (response == nil) || (response.OK == nil) || (response.OK.Shelves != nil) { + t.Log(fmt.Sprintf("list shelves failed %+v", response.OK)) + t.Log(fmt.Sprintf("list shelves failed len=%d", len(response.OK.Shelves))) + t.Fail() + } + } + // attempting to get a shelf should return an error + { + _, err := b.GetShelf(1) + if err == nil { + t.Log("get shelf failed to return an error") + t.Fail() + } + } + // attempting to get a book should return an error + { + _, err := b.GetBook(1, 2) + if err == nil { + t.Log("get book failed to return an error") + t.Fail() + } + } + // add a shelf + { + var shelf bookstore.Shelf + shelf.Theme = "mysteries" + response, err := b.CreateShelf(shelf) + if err != nil { + t.Log("create shelf mysteries failed") + t.Fail() + } + if (response.OK.Name != "shelves/1") || + (response.OK.Theme != "mysteries") { + t.Log("create shelf mysteries failed") + t.Fail() + } + } + // add another shelf + { + var shelf bookstore.Shelf + shelf.Theme = "comedies" + response, err := b.CreateShelf(shelf) + if err != nil { + t.Log("create shelf comedies failed") + t.Fail() + } + if (response.OK.Name != "shelves/2") || + (response.OK.Theme != "comedies") { + t.Log("create shelf comedies failed") + t.Fail() + } + } + // get the first shelf that was added + { + response, err := b.GetShelf(1) + if err != nil { + t.Log("get shelf mysteries failed") + t.Fail() + } + if (response.OK.Name != "shelves/1") || + (response.OK.Theme != "mysteries") { + t.Log("get shelf mysteries failed") + t.Fail() + } + } + // list shelves and verify that there are 2 + { + response, err := b.ListShelves() + if err != nil { + t.Log("list shelves failed") + t.Fail() + } + if len(response.OK.Shelves) != 2 { + t.Log("list shelves failed") + t.Fail() + } + } + // delete a shelf + { + err := b.DeleteShelf(2) + if err != nil { + t.Log("delete shelf failed") + t.Fail() + } + } + // list shelves and verify that there is only 1 + { + response, err := b.ListShelves() + if err != nil { + t.Log("list shelves failed") + t.Fail() + } + if len(response.OK.Shelves) != 1 { + t.Log("list shelves failed") + t.Fail() + } + } + // list books on a shelf, verify that there are none + { + response, err := b.ListBooks(1) + if err != nil { + t.Log("list books failed") + t.Fail() + } + if len(response.OK.Books) != 0 { + t.Log("list books failed") + t.Fail() + } + } + // create a book + { + var book bookstore.Book + book.Author = "Agatha Christie" + book.Title = "And Then There Were None" + _, err := b.CreateBook(1, book) + if err != nil { + t.Log("create book failed") + t.Fail() + } + } + // create another book + { + var book bookstore.Book + book.Author = "Agatha Christie" + book.Title = "Murder on the Orient Express" + _, err := b.CreateBook(1, book) + if err != nil { + t.Log("create book failed") + t.Fail() + } + } + // get the first book that was added + { + _, err := 
b.GetBook(1, 1) + if err != nil { + t.Log("get book failed") + t.Fail() + } + } + // list the books on a shelf and verify that there are 2 + { + response, err := b.ListBooks(1) + if err != nil { + t.Log("list books failed") + t.Fail() + } + if len(response.OK.Books) != 2 { + t.Log("list books failed") + t.Fail() + } + } + // delete a book + { + err := b.DeleteBook(1, 2) + if err != nil { + t.Log("delete book failed") + t.Fail() + } + } + // list the books on a shelf and verify that is only 1 + { + response, err := b.ListBooks(1) + if err != nil { + t.Log("list books failed") + t.Fail() + } + if len(response.OK.Books) != 1 { + t.Log("list books failed") + t.Fail() + } + } + // verify the handling of a badly-formed request + { + req, err := http.NewRequest("POST", service+"/shelves", strings.NewReader("")) + if err != nil { + t.Log("bad request failed") + return + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return + } + // we expect a 400 (Bad Request) code + if resp.StatusCode != 400 { + t.Log("bad request failed") + t.Fail() + } + return + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/app.yaml b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/app.yaml new file mode 100644 index 000000000..9c262ce19 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/app.yaml @@ -0,0 +1,9 @@ +application: bookstore +version: 1 +runtime: go +api_version: go1 +handlers: +- url: /.* + script: _go_app +- url: / + static_dir: static diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/init.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/init.go new file mode 100644 index 000000000..53df81a53 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/init.go @@ -0,0 +1,27 @@ +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package main + +import ( + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore" +) + +// init() is called when the package is loaded +// this allows this app to be trivially deployed to Google App Engine, which does not call main() +func init() { + bookstore.Initialize(NewService()) +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/main.go new file mode 100644 index 000000000..6a14ae944 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/main.go @@ -0,0 +1,34 @@ +// +build !appengine + +// This file is omitted when the app is built for Google App Engine + +/* + Copyright 2017 Google Inc. 
All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package main + +import ( + "log" + + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore" +) + +func main() { + err := bookstore.ServeHTTP(":8080") + if err != nil { + log.Printf("%v", err) + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/service.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/service.go new file mode 100644 index 000000000..67c128beb --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/service/service.go @@ -0,0 +1,195 @@ +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package main + +import ( + "errors" + "fmt" + "net/http" + "sync" + + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/bookstore/bookstore" +) + +// +// The Service type implements a bookstore service. +// All objects are managed in an in-memory non-persistent store. +// +type Service struct { + // shelves are stored in a map keyed by shelf id + // books are stored in a two level map, keyed first by shelf id and then by book id + Shelves map[int64]*bookstore.Shelf + Books map[int64]map[int64]*bookstore.Book + LastShelfID int64 // the id of the last shelf that was added + LastBookID int64 // the id of the last book that was added + Mutex sync.Mutex // global mutex to synchronize service access +} + +func NewService() *Service { + return &Service{ + Shelves: make(map[int64]*bookstore.Shelf), + Books: make(map[int64]map[int64]*bookstore.Book), + } +} + +func (service *Service) ListShelves(responses *bookstore.ListShelvesResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // copy shelf ids from Shelves map keys + shelves := make([]bookstore.Shelf, 0, len(service.Shelves)) + for _, shelf := range service.Shelves { + shelves = append(shelves, *shelf) + } + response := &bookstore.ListShelvesResponse{} + response.Shelves = shelves + (*responses).OK = response + return err +} + +func (service *Service) CreateShelf(parameters *bookstore.CreateShelfParameters, responses *bookstore.CreateShelfResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // assign an id and name to a shelf and add it to the Shelves map. 
+ shelf := parameters.Shelf + service.LastShelfID++ + sid := service.LastShelfID + shelf.Name = fmt.Sprintf("shelves/%d", sid) + service.Shelves[sid] = &shelf + (*responses).OK = &shelf + return err +} + +func (service *Service) DeleteShelves() (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // delete everything by reinitializing the Shelves and Books maps. + service.Shelves = make(map[int64]*bookstore.Shelf) + service.Books = make(map[int64]map[int64]*bookstore.Book) + service.LastShelfID = 0 + service.LastBookID = 0 + return nil +} + +func (service *Service) GetShelf(parameters *bookstore.GetShelfParameters, responses *bookstore.GetShelfResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // look up a shelf from the Shelves map. + shelf, err := service.getShelf(parameters.Shelf) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + return nil + } else { + (*responses).OK = shelf + return nil + } +} + +func (service *Service) DeleteShelf(parameters *bookstore.DeleteShelfParameters) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // delete a shelf by removing the shelf from the Shelves map and the associated books from the Books map. + delete(service.Shelves, parameters.Shelf) + delete(service.Books, parameters.Shelf) + return nil +} + +func (service *Service) ListBooks(parameters *bookstore.ListBooksParameters, responses *bookstore.ListBooksResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // list the books in a shelf + _, err = service.getShelf(parameters.Shelf) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + return nil + } + shelfBooks := service.Books[parameters.Shelf] + books := make([]bookstore.Book, 0, len(shelfBooks)) + for _, book := range shelfBooks { + books = append(books, *book) + } + response := &bookstore.ListBooksResponse{} + response.Books = books + (*responses).OK = response + return nil +} + +func (service *Service) CreateBook(parameters *bookstore.CreateBookParameters, responses *bookstore.CreateBookResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // return "not found" if the shelf doesn't exist + shelf, err := service.getShelf(parameters.Shelf) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + return nil + } + // assign an id and name to a book and add it to the Books map. 
+ service.LastBookID++ + bid := service.LastBookID + book := parameters.Book + book.Name = fmt.Sprintf("%s/books/%d", shelf.Name, bid) + if service.Books[parameters.Shelf] == nil { + service.Books[parameters.Shelf] = make(map[int64]*bookstore.Book) + } + service.Books[parameters.Shelf][bid] = &book + (*responses).OK = &book + return err +} + +func (service *Service) GetBook(parameters *bookstore.GetBookParameters, responses *bookstore.GetBookResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // get a book from the Books map + book, err := service.getBook(parameters.Shelf, parameters.Book) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + } else { + (*responses).OK = book + } + return nil +} + +func (service *Service) DeleteBook(parameters *bookstore.DeleteBookParameters) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // delete a book by removing the book from the Books map. + delete(service.Books[parameters.Shelf], parameters.Book) + return nil +} + +// internal helpers + +func (service *Service) getShelf(sid int64) (shelf *bookstore.Shelf, err error) { + shelf, ok := service.Shelves[sid] + if !ok { + return nil, errors.New(fmt.Sprintf("Couldn't find shelf %d", sid)) + } else { + return shelf, nil + } +} + +func (service *Service) getBook(sid int64, bid int64) (book *bookstore.Book, err error) { + _, err = service.getShelf(sid) + if err != nil { + return nil, err + } + book, ok := service.Books[sid][bid] + if !ok { + return nil, errors.New(fmt.Sprintf("Couldn't find book %d on shelf %d", bid, sid)) + } else { + return book, nil + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/Makefile new file mode 100644 index 000000000..5a71fb9be --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/Makefile @@ -0,0 +1,3 @@ +all: + gnostic swagger.json --go-client-out=xkcd + go install diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/main.go new file mode 100644 index 000000000..1bd3448ee --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/main.go @@ -0,0 +1,22 @@ +package main + +import ( + "fmt" + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/xkcd" +) + +func main() { + c := xkcd.NewClient("http://xkcd.com") + + comic, err := c.Get_info_0_json() + if err != nil { + panic(err) + } + fmt.Printf("%+v\n", comic) + + comic, err = c.Get_comicId_info_0_json(1800) + if err != nil { + panic(err) + } + fmt.Printf("%+v\n", comic) +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/swagger.json b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/swagger.json new file mode 100644 index 000000000..6a74c5f44 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v2.0/xkcd/swagger.json @@ -0,0 +1,111 @@ +{ + "swagger": "2.0", + "schemes": [ + "http" + ], + "host": "xkcd.com", + "basePath": "/", + "info": { + "description": "Webcomic of romance, sarcasm, math, and language.", + "title": "XKCD", + "version": "1.0.0", + "x-apisguru-categories": [ + "media" 
+ ], + "x-logo": { + "url": "https://api.apis.guru/v2/cache/logo/http_imgs.xkcd.com_static_terrible_small_logo.png" + }, + "x-origin": { + "format": "swagger", + "url": "https://raw.githubusercontent.com/APIs-guru/unofficial_openapi_specs/master/xkcd.com/1.0.0/swagger.yaml", + "version": "2.0" + }, + "x-preferred": true, + "x-providerName": "xkcd.com", + "x-tags": [ + "humor", + "comics" + ], + "x-unofficialSpec": true + }, + "externalDocs": { + "url": "https://xkcd.com/json.html" + }, + "securityDefinitions": {}, + "paths": { + "/info.0.json": { + "get": { + "description": "Fetch current comic and metadata.\n", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/comic" + } + } + } + } + }, + "/{comicId}/info.0.json": { + "get": { + "description": "Fetch comics and metadata by comic id.\n", + "parameters": [ + { + "in": "path", + "name": "comicId", + "required": true, + "type": "number" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/comic" + } + } + } + } + } + }, + "definitions": { + "comic": { + "properties": { + "alt": { + "type": "string" + }, + "day": { + "type": "string" + }, + "img": { + "type": "string" + }, + "link": { + "type": "string" + }, + "month": { + "type": "string" + }, + "news": { + "type": "string" + }, + "num": { + "type": "number" + }, + "safe_title": { + "type": "string" + }, + "title": { + "type": "string" + }, + "transcript": { + "type": "string" + }, + "year": { + "type": "string" + } + }, + "type": "object" + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/Makefile new file mode 100644 index 000000000..6d04686a3 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/Makefile @@ -0,0 +1,20 @@ +build: + go get golang.org/x/tools/cmd/goimports + go install github.com/googleapis/gnostic + go install github.com/googleapis/gnostic/plugins/gnostic-go-generator + rm -f $(GOPATH)/bin/gnostic-go-client $(GOPATH)/bin/gnostic-go-server + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-client + ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-server + +all: build + gnostic bookstore.json --go-generator-out=bookstore + +clean: + rm -rf bookstore bookstore.text service/service + +test: all + killall service; true # ignore errors due to no matching processes + cd service; go get .; go build; ./service & + go test + killall service + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/README.md new file mode 100644 index 000000000..a684b3af6 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/README.md @@ -0,0 +1,23 @@ +# Bookstore Example + +This directory contains an OpenAPI description of a simple bookstore API. + +Use this example to try the `gnostic-go-generator` plugin, which implements +`gnostic-go-client` and `gnostic-go-server` for generating API client and +server code, respectively. + +Run "make all" to build and install `gnostic` and the Go plugins. +It will generate both client and server code. The API client and +server code will be in the `bookstore` package. 
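
For orientation, here is a minimal sketch of a client program written against the generated `bookstore` package. It simply mirrors the calls made in `bookstore_test.go` (shown later in this diff) and assumes the service is already running at `http://localhost:8080`, the address used by `service/main.go`:

```go
package main

import (
	"fmt"
	"log"

	"github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore"
)

func main() {
	// Point the generated client at the locally running service.
	b := bookstore.NewClient("http://localhost:8080", nil)

	// Create a shelf, then list shelves to confirm it was stored.
	var shelf bookstore.Shelf
	shelf.Theme = "mysteries"
	created, err := b.CreateShelf(shelf)
	if err != nil {
		log.Fatalf("create shelf: %v", err)
	}
	fmt.Printf("created %s (theme: %s)\n", created.OK.Name, created.OK.Theme)

	listed, err := b.ListShelves()
	if err != nil {
		log.Fatalf("list shelves: %v", err)
	}
	fmt.Printf("the store now holds %d shelf/shelves\n", len(listed.OK.Shelves))
}
```

The same pattern applies to every generated operation: construct a `Client` with `NewClient`, call the method named after the operation's `operationId`, and read the `OK` (or `Default`) field of the returned responses struct.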
+ +The `service` directory contains additional code that completes the server. +To build and run the service, `cd service` and do the following: + + go get . + go build + ./service & + +To test the service with the generated client, go back up to the top-level +directory and run `go test`. The test in `bookstore_test.go` uses client +code generated in `bookstore` to verify the service. + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore.json b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore.json new file mode 100644 index 000000000..35fdd8611 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore.json @@ -0,0 +1,392 @@ +{ + "openapi": "3.0.0", + "servers": [ + { + "url": "https://generated-bookstore.appspot.com/" + } + ], + "info": { + "description": "A simple Bookstore API example.", + "title": "Bookstore", + "version": "1.0.0" + }, + "paths": { + "/shelves": { + "get": { + "description": "Return all shelves in the bookstore.", + "operationId": "listShelves", + "responses": { + "200": { + "description": "List of shelves in the bookstore.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/listShelvesResponse" + } + } + } + } + }, + "security": [] + }, + "post": { + "description": "Create a new shelf in the bookstore.", + "operationId": "createShelf", + "responses": { + "200": { + "description": "A newly created shelf resource.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/shelf" + } + } + } + } + }, + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/shelf" + } + } + }, + "description": "A shelf resource to create.", + "required": true + } + }, + "delete": { + "description": "Delete all shelves.", + "operationId": "deleteShelves", + "responses": { + "default": { + "description": "An empty response body." + } + } + } + }, + "/shelves/{shelf}": { + "get": { + "description": "Get a single shelf resource with the given ID.", + "operationId": "getShelf", + "parameters": [ + { + "description": "ID of the shelf to get.", + "in": "path", + "name": "shelf", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "A shelf resource.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/shelf" + } + } + } + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/error" + } + } + } + } + } + }, + "delete": { + "description": "Delete a single shelf with the given ID.", + "operationId": "deleteShelf", + "parameters": [ + { + "description": "ID of the shelf to delete.", + "in": "path", + "name": "shelf", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "default": { + "description": "An empty response body." 
+ } + } + } + }, + "/shelves/{shelf}/books": { + "get": { + "description": "Return all books in a shelf with the given ID.", + "operationId": "listBooks", + "parameters": [ + { + "description": "ID of the shelf whose books should be returned.", + "in": "path", + "name": "shelf", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "List of books on the specified shelf.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/listBooksResponse" + } + } + } + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/error" + } + } + } + } + } + }, + "post": { + "description": "Create a new book on the shelf.", + "operationId": "createBook", + "parameters": [ + { + "description": "ID of the shelf where the book should be created.", + "in": "path", + "name": "shelf", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "A newly created book resource.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/book" + } + } + } + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/error" + } + } + } + } + }, + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/book" + } + } + }, + "description": "Book to create.", + "required": true + } + } + }, + "/shelves/{shelf}/books/{book}": { + "get": { + "description": "Get a single book with a given ID from a shelf.", + "operationId": "getBook", + "parameters": [ + { + "description": "ID of the shelf from which to get the book.", + "in": "path", + "name": "shelf", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "description": "ID of the book to get from the shelf.", + "in": "path", + "name": "book", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "A book resource.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/book" + } + } + } + }, + "default": { + "description": "unexpected error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/error" + } + } + } + } + } + }, + "delete": { + "description": "Delete a single book with a given ID from a shelf.", + "operationId": "deleteBook", + "parameters": [ + { + "description": "ID of the shelf from which to delete the book.", + "in": "path", + "name": "shelf", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + }, + { + "description": "ID of the book to delete from the shelf.", + "in": "path", + "name": "book", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "default": { + "description": "An empty response body." 
+ } + } + } + } + }, + "security": [ + { + "api_key": [] + } + ], + "components": { + "schemas": { + "book": { + "properties": { + "author": { + "type": "string" + }, + "name": { + "type": "string" + }, + "title": { + "type": "string" + } + }, + "required": [ + "name", + "author", + "title" + ], + "type": "object" + }, + "listBooksResponse": { + "properties": { + "books": { + "items": { + "$ref": "#/components/schemas/book" + }, + "type": "array" + } + }, + "required": [ + "books" + ], + "type": "object" + }, + "listShelvesResponse": { + "properties": { + "shelves": { + "items": { + "$ref": "#/components/schemas/shelf" + }, + "type": "array" + } + }, + "type": "object" + }, + "shelf": { + "properties": { + "name": { + "type": "string" + }, + "theme": { + "type": "string" + } + }, + "required": [ + "name", + "theme" + ], + "type": "object" + }, + "error": { + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + }, + "type": "object" + } + }, + "securitySchemes": { + "api_key": { + "in": "query", + "name": "key", + "type": "apiKey" + } + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore_test.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore_test.go new file mode 100644 index 000000000..1490cf889 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore_test.go @@ -0,0 +1,239 @@ +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package test + +import ( + "fmt" + "net/http" + "strings" + "testing" + + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore" +) + +const service = "http://localhost:8080" + +//const service = "http://generated-bookstore.appspot.com" + +func TestBookstore(t *testing.T) { + // create a client + b := bookstore.NewClient(service, nil) + // reset the service by deleting all shelves + { + err := b.DeleteShelves() + if err != nil { + t.Log("delete shelves failed") + t.Fail() + } + } + // verify that the service has no shelves + { + response, err := b.ListShelves() + if err != nil { + t.Log("list shelves failed") + t.Fail() + } + if (response == nil) || (response.OK == nil) || (response.OK.Shelves != nil) { + t.Log(fmt.Sprintf("list shelves failed %+v", response.OK)) + t.Log(fmt.Sprintf("list shelves failed len=%d", len(response.OK.Shelves))) + t.Fail() + } + } + // attempting to get a shelf should return an error + { + response, err := b.GetShelf(1) + if err == nil { + t.Logf("get shelf failed to return an error (%+v)", response.OK) + t.Fail() + } + } + // attempting to get a book should return an error + { + response, err := b.GetBook(1, 2) + if err == nil { + t.Logf("get book failed to return an error (%+v)", response.OK) + t.Fail() + } + } + // add a shelf + { + var shelf bookstore.Shelf + shelf.Theme = "mysteries" + response, err := b.CreateShelf(shelf) + if err != nil { + t.Log("create shelf mysteries failed") + t.Fail() + } + if (response.OK.Name != "shelves/1") || + (response.OK.Theme != "mysteries") { + t.Log("create shelf mysteries failed") + t.Fail() + } + } + // add another shelf + { + var shelf bookstore.Shelf + shelf.Theme = "comedies" + response, err := b.CreateShelf(shelf) + if err != nil { + t.Log("create shelf comedies failed") + t.Fail() + } + if (response.OK.Name != "shelves/2") || + (response.OK.Theme != "comedies") { + t.Log("create shelf comedies failed") + t.Fail() + } + } + // get the first shelf that was added + { + response, err := b.GetShelf(1) + if err != nil { + t.Log("get shelf mysteries failed") + t.Fail() + } + if (response.OK.Name != "shelves/1") || + (response.OK.Theme != "mysteries") { + t.Log("get shelf mysteries failed") + t.Fail() + } + } + // list shelves and verify that there are 2 + { + response, err := b.ListShelves() + if err != nil { + t.Log("list shelves failed") + t.Fail() + } + if len(response.OK.Shelves) != 2 { + t.Log("list shelves failed") + t.Fail() + } + } + // delete a shelf + { + err := b.DeleteShelf(2) + if err != nil { + t.Log("delete shelf failed") + t.Fail() + } + } + // list shelves and verify that there is only 1 + { + response, err := b.ListShelves() + if err != nil { + t.Log("list shelves failed") + t.Fail() + } + if len(response.OK.Shelves) != 1 { + t.Log("list shelves failed") + t.Fail() + } + } + // list books on a shelf, verify that there are none + { + response, err := b.ListBooks(1) + if err != nil { + t.Log("list books failed") + t.Fail() + } + if len(response.OK.Books) != 0 { + t.Log("list books failed") + t.Fail() + } + } + // create a book + { + var book bookstore.Book + book.Author = "Agatha Christie" + book.Title = "And Then There Were None" + _, err := b.CreateBook(1, book) + if err != nil { + t.Log("create book failed") + t.Fail() + } + } + // create another book + { + var book bookstore.Book + book.Author = "Agatha Christie" + book.Title = "Murder on the Orient Express" + _, err := b.CreateBook(1, book) + if err != nil { + t.Log("create book failed") + t.Fail() + } + } + 
// get the first book that was added + { + _, err := b.GetBook(1, 1) + if err != nil { + t.Log("get book failed") + t.Fail() + } + } + // list the books on a shelf and verify that there are 2 + { + response, err := b.ListBooks(1) + if err != nil { + t.Log("list books failed") + t.Fail() + } + if len(response.OK.Books) != 2 { + t.Log("list books failed") + t.Fail() + } + } + // delete a book + { + err := b.DeleteBook(1, 2) + if err != nil { + t.Log("delete book failed") + t.Fail() + } + } + // list the books on a shelf and verify that is only 1 + { + response, err := b.ListBooks(1) + if err != nil { + t.Log("list books failed") + t.Fail() + } + if len(response.OK.Books) != 1 { + t.Log("list books failed") + t.Fail() + } + } + // verify the handling of a badly-formed request + { + req, err := http.NewRequest("POST", service+"/shelves", strings.NewReader("")) + if err != nil { + t.Log("bad request failed") + return + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return + } + // we expect a 400 (Bad Request) code + if resp.StatusCode != 400 { + t.Log("bad request failed") + t.Fail() + } + return + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/app.yaml b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/app.yaml new file mode 100644 index 000000000..9c262ce19 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/app.yaml @@ -0,0 +1,9 @@ +application: bookstore +version: 1 +runtime: go +api_version: go1 +handlers: +- url: /.* + script: _go_app +- url: / + static_dir: static diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/init.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/init.go new file mode 100644 index 000000000..447b28e1d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/init.go @@ -0,0 +1,27 @@ +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package main + +import ( + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore" +) + +// init() is called when the package is loaded +// this allows this app to be trivially deployed to Google App Engine, which does not call main() +func init() { + bookstore.Initialize(NewService()) +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/main.go new file mode 100644 index 000000000..863f76fa8 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/main.go @@ -0,0 +1,34 @@ +// +build !appengine + +// This file is omitted when the app is built for Google App Engine + +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package main + +import ( + "log" + + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore" +) + +func main() { + err := bookstore.ServeHTTP(":8080") + if err != nil { + log.Printf("%v", err) + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/service.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/service.go new file mode 100644 index 000000000..d2a3903e2 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/service/service.go @@ -0,0 +1,195 @@ +/* + Copyright 2017 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package main + +import ( + "errors" + "fmt" + "net/http" + "sync" + + "github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/bookstore/bookstore" +) + +// +// The Service type implements a bookstore service. +// All objects are managed in an in-memory non-persistent store. 
+// +type Service struct { + // shelves are stored in a map keyed by shelf id + // books are stored in a two level map, keyed first by shelf id and then by book id + Shelves map[int64]*bookstore.Shelf + Books map[int64]map[int64]*bookstore.Book + LastShelfID int64 // the id of the last shelf that was added + LastBookID int64 // the id of the last book that was added + Mutex sync.Mutex // global mutex to synchronize service access +} + +func NewService() *Service { + return &Service{ + Shelves: make(map[int64]*bookstore.Shelf), + Books: make(map[int64]map[int64]*bookstore.Book), + } +} + +func (service *Service) ListShelves(responses *bookstore.ListShelvesResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // copy shelf ids from Shelves map keys + shelves := make([]bookstore.Shelf, 0, len(service.Shelves)) + for _, shelf := range service.Shelves { + shelves = append(shelves, *shelf) + } + response := &bookstore.ListShelvesResponse{} + response.Shelves = shelves + (*responses).OK = response + return err +} + +func (service *Service) CreateShelf(parameters *bookstore.CreateShelfParameters, responses *bookstore.CreateShelfResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // assign an id and name to a shelf and add it to the Shelves map. + shelf := parameters.Shelf + service.LastShelfID++ + sid := service.LastShelfID + shelf.Name = fmt.Sprintf("shelves/%d", sid) + service.Shelves[sid] = &shelf + (*responses).OK = &shelf + return err +} + +func (service *Service) DeleteShelves() (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // delete everything by reinitializing the Shelves and Books maps. + service.Shelves = make(map[int64]*bookstore.Shelf) + service.Books = make(map[int64]map[int64]*bookstore.Book) + service.LastShelfID = 0 + service.LastBookID = 0 + return nil +} + +func (service *Service) GetShelf(parameters *bookstore.GetShelfParameters, responses *bookstore.GetShelfResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // look up a shelf from the Shelves map. + shelf, err := service.getShelf(parameters.Shelf) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + return nil + } else { + (*responses).OK = shelf + return nil + } +} + +func (service *Service) DeleteShelf(parameters *bookstore.DeleteShelfParameters) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // delete a shelf by removing the shelf from the Shelves map and the associated books from the Books map. 
+ delete(service.Shelves, parameters.Shelf) + delete(service.Books, parameters.Shelf) + return nil +} + +func (service *Service) ListBooks(parameters *bookstore.ListBooksParameters, responses *bookstore.ListBooksResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // list the books in a shelf + _, err = service.getShelf(parameters.Shelf) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + return nil + } + shelfBooks := service.Books[parameters.Shelf] + books := make([]bookstore.Book, 0, len(shelfBooks)) + for _, book := range shelfBooks { + books = append(books, *book) + } + response := &bookstore.ListBooksResponse{} + response.Books = books + (*responses).OK = response + return nil +} + +func (service *Service) CreateBook(parameters *bookstore.CreateBookParameters, responses *bookstore.CreateBookResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // return "not found" if the shelf doesn't exist + shelf, err := service.getShelf(parameters.Shelf) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + return nil + } + // assign an id and name to a book and add it to the Books map. + service.LastBookID++ + bid := service.LastBookID + book := parameters.Book + book.Name = fmt.Sprintf("%s/books/%d", shelf.Name, bid) + if service.Books[parameters.Shelf] == nil { + service.Books[parameters.Shelf] = make(map[int64]*bookstore.Book) + } + service.Books[parameters.Shelf][bid] = &book + (*responses).OK = &book + return err +} + +func (service *Service) GetBook(parameters *bookstore.GetBookParameters, responses *bookstore.GetBookResponses) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // get a book from the Books map + book, err := service.getBook(parameters.Shelf, parameters.Book) + if err != nil { + (*responses).Default = &bookstore.Error{Code: int32(http.StatusNotFound), Message: err.Error()} + } else { + (*responses).OK = book + } + return nil +} + +func (service *Service) DeleteBook(parameters *bookstore.DeleteBookParameters) (err error) { + service.Mutex.Lock() + defer service.Mutex.Unlock() + // delete a book by removing the book from the Books map. + delete(service.Books[parameters.Shelf], parameters.Book) + return nil +} + +// internal helpers + +func (service *Service) getShelf(sid int64) (shelf *bookstore.Shelf, err error) { + shelf, ok := service.Shelves[sid] + if !ok { + return nil, errors.New(fmt.Sprintf("Couldn't find shelf %d", sid)) + } else { + return shelf, nil + } +} + +func (service *Service) getBook(sid int64, bid int64) (book *bookstore.Book, err error) { + _, err = service.getShelf(sid) + if err != nil { + return nil, err + } + book, ok := service.Books[sid][bid] + if !ok { + return nil, errors.New(fmt.Sprintf("Couldn't find book %d on shelf %d", bid, sid)) + } else { + return book, nil + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/README.md new file mode 100644 index 000000000..24b23fd4f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/README.md @@ -0,0 +1,26 @@ +# urlshortener sample client + +## Steps to run: + +1. Generate the OpenAPI 3.0 description using `disco` (in the `gnostic/apps` directory). 
+
+    disco get urlshortener --openapi3
+
+2. (optional) View the JSON OpenAPI 3.0 description.
+
+    gnostic openapi3-urlshortener-v1.pb --json-out=-
+
+3. Generate the urlshortener client.
+
+    gnostic openapi3-urlshortener-v1.pb --go-client-out=urlshortener
+
+4. Build the client.
+
+    go install
+
+5. Download `client_secrets.json` from the Google Cloud Developer Console.
+
+6. Run the client
+
+    urlshortener
+
diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/main.go
new file mode 100644
index 000000000..0db26a81a
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/main.go
@@ -0,0 +1,62 @@
+package main
+
+import (
+	"fmt"
+	"log"
+
+	"github.com/docopt/docopt-go"
+	"github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/googleauth"
+	"github.com/googleapis/gnostic/plugins/gnostic-go-generator/examples/v3.0/urlshortener/urlshortener"
+)
+
+func main() {
+	usage := `
+Usage:
+	urlshortener get <url>
+	urlshortener list
+	urlshortener insert <url>
+	`
+	arguments, err := docopt.Parse(usage, nil, false, "URL Shortener 1.0", false)
+	if err != nil {
+		log.Fatalf("%+v", err)
+	}
+
+	path := "https://www.googleapis.com/urlshortener/v1" // this should be generated
+
+	client, err := googleauth.NewOAuth2Client("https://www.googleapis.com/auth/urlshortener")
+	if err != nil {
+		log.Fatalf("Error building OAuth client: %v", err)
+	}
+	c := urlshortener.NewClient(path, client)
+
+	// get
+	if arguments["get"].(bool) {
+		response, err := c.Urlshortener_Url_Get("FULL", arguments["<url>"].(string))
+		if err != nil {
+			log.Fatalf("%+v", err)
+		}
+		fmt.Println(response.Default.LongUrl)
+	}
+
+	// list
+	if arguments["list"].(bool) {
+		response, err := c.Urlshortener_Url_List("", "")
+		if err != nil {
+			log.Fatalf("%+v", err)
+		}
+		for _, item := range response.Default.Items {
+			fmt.Printf("%-40s %s\n", item.Id, item.LongUrl)
+		}
+	}
+
+	// insert
+	if arguments["insert"].(bool) {
+		var url urlshortener.Url
+		url.LongUrl = arguments["<url>"].(string)
+		response, err := c.Urlshortener_Url_Insert(url)
+		if err != nil {
+			log.Fatalf("%+v", err)
+		}
+		fmt.Printf("%+v\n", response.Default.Id)
+	}
+}
diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/goimports.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/goimports.go
new file mode 100644
index 000000000..a2cbd089b
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/goimports.go
@@ -0,0 +1,50 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"io/ioutil"
+	"log"
+	"os"
+	"os/exec"
+	"strings"
+)
+
+// Run goimports to format and update imports statements in generated code.
+func goimports(filename string, inputBytes []byte) (outputBytes []byte, err error) {
+	if false {
+		return inputBytes, nil
+	}
+	cmd := exec.Command(os.Getenv("GOPATH") + "/bin/goimports")
+	input, _ := cmd.StdinPipe()
+	output, _ := cmd.StdoutPipe()
+	cmderr, _ := cmd.StderrPipe()
+	err = cmd.Start()
+	if err != nil {
+		return
+	}
+	input.Write(inputBytes)
+	input.Close()
+
+	outputBytes, _ = ioutil.ReadAll(output)
+	errors, _ := ioutil.ReadAll(cmderr)
+	if len(errors) > 0 {
+		errors := strings.Replace(string(errors), "<standard input>", filename, -1)
+		log.Printf("Syntax errors in generated code:\n%s", errors)
+		return inputBytes, nil
+	}
+
+	return
+}
diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/language.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/language.go
new file mode 100644
index 000000000..704370874
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/language.go
@@ -0,0 +1,121 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	surface "github.com/googleapis/gnostic/surface"
+	"unicode"
+	"strings"
+)
+
+type GoLanguageModel struct{}
+
+func NewGoLanguageModel() *GoLanguageModel {
+	return &GoLanguageModel{}
+}
+
+// Prepare sets language-specific properties for all types and methods.
+func (language *GoLanguageModel) Prepare(model *surface.Model) { + + for _, t := range model.Types { + // determine the type used for Go language implementation of the type + t.TypeName = strings.Title(filteredTypeName(t.Name)) + + for _, f := range t.Fields { + f.FieldName = goFieldName(f.Name) + f.ParameterName = goParameterName(f.Name) + switch f.Type { + case "number": + f.NativeType = "int" + case "integer": + switch f.Format { + case "int32": + f.NativeType = "int32" + case "int64": + f.NativeType = "int64" + default: + f.NativeType = "int64" + } + case "object": + f.NativeType = "{}interface" + case "string": + f.NativeType = "string" + default: + f.NativeType = strings.Title(f.Type) + } + } + } + + for _, m := range model.Methods { + m.HandlerName = "Handle" + m.Name + m.ProcessorName = m.Name + m.ClientName = m.Name + } +} + +func goParameterName(name string) string { + // lowercase first letter + a := []rune(name) + a[0] = unicode.ToLower(a[0]) + name = string(a) + // replace dots with underscores + name = strings.Replace(name, ".", "_", -1) + // replaces dashes with underscores + name = strings.Replace(name, "-", "_", -1) + // avoid reserved words + if name == "type" { + return "myType" + } + return name +} + +func goFieldName(name string) string { + name = strings.Replace(name, ".", "_", -1) + name = strings.Replace(name, "-", "_", -1) + name = snakeCaseToCamelCaseWithCapitalizedFirstLetter(name) + // avoid integers + if name == "200" { + return "OK" + } + return name +} + +func snakeCaseToCamelCaseWithCapitalizedFirstLetter(snakeCase string) (camelCase string) { + isToUpper := false + for _, runeValue := range snakeCase { + if isToUpper { + camelCase += strings.ToUpper(string(runeValue)) + isToUpper = false + } else { + if runeValue == '_' { + isToUpper = true + } else { + camelCase += string(runeValue) + } + } + } + camelCase = strings.Title(camelCase) + return +} + +func filteredTypeName(typeName string) (name string) { + // first take the last path segment + parts := strings.Split(typeName, "/") + name = parts[len(parts)-1] + // then take the last part of a dotted name + parts = strings.Split(name, ".") + name = parts[len(parts)-1] + return name +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/linewriter.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/linewriter.go new file mode 100644 index 000000000..d09160147 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/linewriter.go @@ -0,0 +1,29 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import "bytes" + +type LineWriter struct { + bytes.Buffer +} + +func NewLineWriter() *LineWriter { + return &LineWriter{} +} + +func (w *LineWriter) WriteLine(line string) { + w.WriteString(line + "\n") +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/main.go new file mode 100644 index 000000000..cbdab39d1 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/main.go @@ -0,0 +1,71 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gnostic_go_generator is a sample Gnostic plugin that generates Go +// code that supports an API. +package main + +import ( + "encoding/json" + "errors" + "strings" + + plugins "github.com/googleapis/gnostic/plugins" +) + +// This is the main function for the code generation plugin. +func main() { + env, err := plugins.NewEnvironment() + env.RespondAndExitIfError(err) + + packageName := env.Request.OutputPath + + // Use the name used to run the plugin to decide which files to generate. + var files []string + switch { + case strings.Contains(env.Invocation, "gnostic-go-client"): + files = []string{"client.go", "types.go", "constants.go"} + case strings.Contains(env.Invocation, "gnostic-go-server"): + files = []string{"server.go", "provider.go", "types.go", "constants.go"} + default: + files = []string{"client.go", "server.go", "provider.go", "types.go", "constants.go"} + } + + // Get the code surface model. + model := env.Request.Surface + + if model == nil { + err = errors.New("No generated code surface model is available.") + env.RespondAndExitIfError(err) + } + + // Customize the code surface model for Go + NewGoLanguageModel().Prepare(model) + + modelJSON, _ := json.MarshalIndent(model, "", " ") + modelFile := &plugins.File{Name: "model.json", Data: modelJSON} + env.Response.Files = append(env.Response.Files, modelFile) + + // Create the renderer. + renderer, err := NewServiceRenderer(model) + renderer.Package = packageName + env.RespondAndExitIfError(err) + + // Run the renderer to generate files and add them to the response object. + err = renderer.Render(env.Response, files) + env.RespondAndExitIfError(err) + + // Return with success. + env.RespondAndExit() +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_client.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_client.go new file mode 100644 index 000000000..6d93b4e7b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_client.go @@ -0,0 +1,176 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "strings" + + surface "github.com/googleapis/gnostic/surface" +) + +// ParameterList returns a string representation of a method's parameters +func ParameterList(parametersType *surface.Type) string { + result := "" + if parametersType != nil { + for _, field := range parametersType.Fields { + result += field.ParameterName + " " + field.NativeType + "," + "\n" + } + } + return result +} + +func (renderer *Renderer) RenderClient() ([]byte, error) { + f := NewLineWriter() + + f.WriteLine("// GENERATED FILE: DO NOT EDIT!") + f.WriteLine(``) + f.WriteLine("package " + renderer.Package) + + // imports will be automatically added by goimports + + f.WriteLine(`// Client represents an API client.`) + f.WriteLine(`type Client struct {`) + f.WriteLine(` service string`) + f.WriteLine(` APIKey string`) + f.WriteLine(` client *http.Client`) + f.WriteLine(`}`) + + f.WriteLine(`// NewClient creates an API client.`) + f.WriteLine(`func NewClient(service string, c *http.Client) *Client {`) + f.WriteLine(` client := &Client{}`) + f.WriteLine(` client.service = service`) + f.WriteLine(` if c != nil {`) + f.WriteLine(` client.client = c`) + f.WriteLine(` } else {`) + f.WriteLine(` client.client = http.DefaultClient`) + f.WriteLine(` }`) + f.WriteLine(` return client`) + f.WriteLine(`}`) + + for _, method := range renderer.Model.Methods { + parametersType := renderer.Model.TypeWithTypeName(method.ParametersTypeName) + responsesType := renderer.Model.TypeWithTypeName(method.ResponsesTypeName) + + f.WriteLine(commentForText(method.Description)) + f.WriteLine(`func (client *Client) ` + method.ClientName + `(`) + f.WriteLine(ParameterList(parametersType) + `) (`) + if method.ResponsesTypeName == "" { + f.WriteLine(`err error,`) + } else { + f.WriteLine(`response *` + method.ResponsesTypeName + `,`) + f.WriteLine(`err error,`) + } + f.WriteLine(` ) {`) + + path := method.Path + path = strings.Replace(path, "{+", "{", -1) + f.WriteLine(`path := client.service + "` + path + `"`) + + if parametersType != nil { + if parametersType.HasFieldWithPosition(surface.Position_PATH) { + for _, field := range parametersType.Fields { + if field.Position == surface.Position_PATH { + f.WriteLine(`path = strings.Replace(path, "{` + field.Name + `}", fmt.Sprintf("%v", ` + + field.ParameterName + `), 1)`) + } + } + } + if parametersType.HasFieldWithPosition(surface.Position_QUERY) { + f.WriteLine(`v := url.Values{}`) + for _, field := range parametersType.Fields { + if field.Position == surface.Position_QUERY { + if field.NativeType == "string" { + f.WriteLine(`if (` + field.ParameterName + ` != "") {`) + f.WriteLine(` v.Set("` + field.Name + `", ` + field.ParameterName + `)`) + f.WriteLine(`}`) + } + } + } + f.WriteLine(`if client.APIKey != "" {`) + f.WriteLine(` v.Set("key", client.APIKey)`) + f.WriteLine(`}`) + f.WriteLine(`if len(v) > 0 {`) + f.WriteLine(` path = path + "?" 
+ v.Encode()`) + f.WriteLine(`}`) + } + } + + if method.Method == "POST" { + f.WriteLine(`body := new(bytes.Buffer)`) + f.WriteLine(`json.NewEncoder(body).Encode(` + parametersType.FieldWithPosition(surface.Position_BODY).Name + `)`) + f.WriteLine(`req, err := http.NewRequest("` + method.Method + `", path, body)`) + f.WriteLine(`reqHeaders := make(http.Header)`) + f.WriteLine(`reqHeaders.Set("Content-Type", "application/json")`) + f.WriteLine(`req.Header = reqHeaders`) + } else { + f.WriteLine(`req, err := http.NewRequest("` + method.Method + `", path, nil)`) + } + f.WriteLine(`if err != nil {return}`) + f.WriteLine(`resp, err := client.client.Do(req)`) + f.WriteLine(`if err != nil {return}`) + f.WriteLine(`defer resp.Body.Close()`) + f.WriteLine(`if resp.StatusCode != 200 {`) + + if responsesType != nil { + f.WriteLine(` return nil, errors.New(resp.Status)`) + } else { + f.WriteLine(` return errors.New(resp.Status)`) + } + f.WriteLine(`}`) + + if responsesType != nil { + f.WriteLine(`response = &` + responsesType.Name + `{}`) + + f.WriteLine(`switch {`) + // first handle everything that isn't "default" + for _, responseField := range responsesType.Fields { + if responseField.Name != "default" { + f.WriteLine(`case resp.StatusCode == ` + responseField.Name + `:`) + f.WriteLine(` body, err := ioutil.ReadAll(resp.Body)`) + f.WriteLine(` if err != nil {return nil, err}`) + f.WriteLine(` result := &` + responseField.NativeType + `{}`) + f.WriteLine(` err = json.Unmarshal(body, result)`) + f.WriteLine(` if err != nil {return nil, err}`) + f.WriteLine(` response.` + responseField.FieldName + ` = result`) + } + } + + // then handle "default" + hasDefault := false + for _, responseField := range responsesType.Fields { + if responseField.Name == "default" { + hasDefault = true + f.WriteLine(`default:`) + f.WriteLine(` defer resp.Body.Close()`) + f.WriteLine(` body, err := ioutil.ReadAll(resp.Body)`) + f.WriteLine(` if err != nil {return nil, err}`) + f.WriteLine(` result := &` + responseField.NativeType + `{}`) + f.WriteLine(` err = json.Unmarshal(body, result)`) + f.WriteLine(` if err != nil {return nil, err}`) + f.WriteLine(` response.` + responseField.FieldName + ` = result`) + } + } + if !hasDefault { + f.WriteLine(`default:`) + f.WriteLine(` break`) + } + f.WriteLine(`}`) // close switch statement + } + f.WriteLine("return") + f.WriteLine("}") + } + + return f.Bytes(), nil +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_constants.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_constants.go new file mode 100644 index 000000000..4a27c63c8 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_constants.go @@ -0,0 +1,30 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +func (renderer *Renderer) RenderConstants() ([]byte, error) { + f := NewLineWriter() + f.WriteLine("// GENERATED FILE: DO NOT EDIT!") + f.WriteLine(``) + f.WriteLine("package " + renderer.Package) + f.WriteLine(``) + f.WriteLine(`// ServicePath is the base URL of the service.`) + f.WriteLine(`const ServicePath = "` + `"`) + f.WriteLine(``) + f.WriteLine(`// OAuthScopes lists the OAuth scopes required by the service.`) + f.WriteLine(`const OAuthScopes = "` + `"`) + + return f.Bytes(), nil +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_provider.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_provider.go new file mode 100644 index 000000000..9031abbe8 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_provider.go @@ -0,0 +1,64 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "strings" +) + +func (renderer *Renderer) RenderProvider() ([]byte, error) { + f := NewLineWriter() + f.WriteLine("// GENERATED FILE: DO NOT EDIT!\n") + f.WriteLine("package " + renderer.Package) + f.WriteLine(``) + f.WriteLine(`// To create a server, first write a class that implements this interface.`) + f.WriteLine(`// Then pass an instance of it to Initialize().`) + f.WriteLine(`type Provider interface {`) + for _, method := range renderer.Model.Methods { + parametersType := renderer.Model.TypeWithTypeName(method.ParametersTypeName) + responsesType := renderer.Model.TypeWithTypeName(method.ResponsesTypeName) + f.WriteLine(``) + f.WriteLine(commentForText(method.Description)) + if parametersType != nil { + if responsesType != nil { + f.WriteLine(method.ProcessorName + + `(parameters *` + parametersType.Name + + `, responses *` + responsesType.Name + `) (err error)`) + } else { + f.WriteLine(method.ProcessorName + `(parameters *` + parametersType.Name + `) (err error)`) + } + } else { + if responsesType != nil { + f.WriteLine(method.ProcessorName + `(responses *` + responsesType.Name + `) (err error)`) + } else { + f.WriteLine(method.ProcessorName + `() (err error)`) + } + } + } + f.WriteLine(`}`) + return f.Bytes(), nil +} + +func commentForText(text string) string { + result := "" + lines := strings.Split(text, "\n") + for i, line := range lines { + if i > 0 { + result += "\n" + } + result += "// " + line + } + return result +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_server.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_server.go new file mode 100644 index 000000000..73ecc6f3a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_server.go @@ -0,0 +1,168 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"fmt"
+
+	surface "github.com/googleapis/gnostic/surface"
+)
+
+func (renderer *Renderer) RenderServer() ([]byte, error) {
+	f := NewLineWriter()
+	f.WriteLine("// GENERATED FILE: DO NOT EDIT!")
+	f.WriteLine(``)
+	f.WriteLine("package " + renderer.Package)
+	f.WriteLine(``)
+	imports := []string{
+		"github.com/gorilla/mux",
+		"net/http",
+	}
+	f.WriteLine(``)
+	f.WriteLine(`import (`)
+	for _, imp := range imports {
+		f.WriteLine(`"` + imp + `"`)
+	}
+	f.WriteLine(`)`)
+
+	f.WriteLine(`func intValue(s string) (v int64) {`)
+	f.WriteLine(` v, _ = strconv.ParseInt(s, 10, 64)`)
+	f.WriteLine(` return v`)
+	f.WriteLine(`}`)
+	f.WriteLine(``)
+	f.WriteLine(`// This package-global variable holds the user-written Provider for API services.`)
+	f.WriteLine(`// See the Provider interface for details.`)
+	f.WriteLine(`var provider Provider`)
+	f.WriteLine(``)
+	f.WriteLine(`// These handlers serve API methods.`)
+	f.WriteLine(``)
+
+	for _, method := range renderer.Model.Methods {
+		parametersType := renderer.Model.TypeWithTypeName(method.ParametersTypeName)
+		responsesType := renderer.Model.TypeWithTypeName(method.ResponsesTypeName)
+
+		f.WriteLine(`// Handler`)
+		f.WriteLine(commentForText(method.Description))
+		f.WriteLine(`func ` + method.HandlerName + `(w http.ResponseWriter, r *http.Request) {`)
+		f.WriteLine(` var err error`)
+		if parametersType != nil {
+			f.WriteLine(`// instantiate the parameters structure`)
+			f.WriteLine(`parameters := &` + parametersType.Name + `{}`)
+			if method.Method == "POST" {
+				f.WriteLine(`// deserialize request from post data`)
+				f.WriteLine(`decoder := json.NewDecoder(r.Body)`)
+				f.WriteLine(`err = decoder.Decode(&parameters.` +
+					parametersType.FieldWithPosition(surface.Position_BODY).FieldName + `)`)
+				f.WriteLine(`if err != nil {`)
+				f.WriteLine(` w.WriteHeader(http.StatusBadRequest)`)
+				f.WriteLine(` w.Write([]byte(err.Error() + "\n"))`)
+				f.WriteLine(` return`)
+				f.WriteLine(`}`)
+			}
+			f.WriteLine(`// get request fields in path and query parameters`)
+			if parametersType.HasFieldWithPosition(surface.Position_PATH) {
+				f.WriteLine(`vars := mux.Vars(r)`)
+			}
+			if parametersType.HasFieldWithPosition(surface.Position_FORMDATA) {
+				f.WriteLine(`r.ParseForm()`)
+			}
+			for _, field := range parametersType.Fields {
+				if field.Position == surface.Position_PATH {
+					if field.Type == "string" {
+						f.WriteLine(fmt.Sprintf("// %+v", field))
+						f.WriteLine(`if value, ok := vars["` + field.Name + `"]; ok {`)
+						f.WriteLine(` parameters.` + field.FieldName + ` = value`)
+						f.WriteLine(`}`)
+					} else {
+						f.WriteLine(`if value, ok := vars["` + field.Name + `"]; ok {`)
+						f.WriteLine(` parameters.` + field.FieldName + ` = intValue(value)`)
+						f.WriteLine(`}`)
+					}
+				} else if field.Position == surface.Position_FORMDATA {
+					f.WriteLine(`if len(r.Form["` + field.Name + `"]) > 0 {`)
+					f.WriteLine(` parameters.` + field.FieldName + ` = intValue(r.Form["` + field.Name + `"][0])`)
+					f.WriteLine(`}`)
+				}
+			}
+		}
+		if responsesType != nil {
+			f.WriteLine(`// instantiate the responses structure`)
+			f.WriteLine(`responses := &` + method.ResponsesTypeName
+ `{}`) + } + f.WriteLine(`// call the service provider`) + callLine := `err = provider.` + method.ProcessorName + if parametersType != nil { + if responsesType != nil { + callLine += `(parameters, responses)` + } else { + callLine += `(parameters)` + } + } else { + if responsesType != nil { + callLine += `(responses)` + } else { + callLine += `()` + } + } + f.WriteLine(callLine) + f.WriteLine(`if err == nil {`) + if responsesType != nil { + if responsesType.HasFieldWithName("OK") { + f.WriteLine(`if responses.OK != nil {`) + f.WriteLine(` // write the normal response`) + f.WriteLine(` encoder := json.NewEncoder(w)`) + f.WriteLine(` encoder.Encode(responses.OK)`) + f.WriteLine(` return`) + f.WriteLine(`}`) + } + if responsesType.HasFieldWithName("Default") { + f.WriteLine(`if responses.Default != nil {`) + f.WriteLine(` // write the error response`) + if responsesType.FieldWithName("Default").ServiceType(renderer.Model).FieldWithName("Code") != nil { + f.WriteLine(` w.WriteHeader(int(responses.Default.Code))`) + } + f.WriteLine(` encoder := json.NewEncoder(w)`) + f.WriteLine(` encoder.Encode(responses.Default)`) + f.WriteLine(` return`) + f.WriteLine(`}`) + } + } + f.WriteLine(`} else {`) + f.WriteLine(` w.WriteHeader(http.StatusInternalServerError)`) + f.WriteLine(` w.Write([]byte(err.Error() + "\n"))`) + f.WriteLine(` return`) + f.WriteLine(`}`) + f.WriteLine(`}`) + f.WriteLine(``) + } + f.WriteLine(`// Initialize the API service.`) + f.WriteLine(`func Initialize(p Provider) {`) + f.WriteLine(` provider = p`) + f.WriteLine(` var router = mux.NewRouter()`) + for _, method := range renderer.Model.Methods { + f.WriteLine(`router.HandleFunc("` + method.Path + `", ` + method.HandlerName + `).Methods("` + method.Method + `")`) + } + f.WriteLine(` http.Handle("/", router)`) + f.WriteLine(`}`) + f.WriteLine(``) + f.WriteLine(`// Provide the API service over HTTP.`) + f.WriteLine(`func ServeHTTP(address string) error {`) + f.WriteLine(` if provider == nil {`) + f.WriteLine(` return errors.New("Use ` + renderer.Package + `.Initialize() to set a service provider.")`) + f.WriteLine(` }`) + f.WriteLine(` return http.ListenAndServe(address, nil)`) + f.WriteLine(`}`) + return f.Bytes(), nil +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_types.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_types.go new file mode 100644 index 000000000..2d7ab1e5f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/render_types.go @@ -0,0 +1,57 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import ( + surface "github.com/googleapis/gnostic/surface" +) + +func (renderer *Renderer) RenderTypes() ([]byte, error) { + f := NewLineWriter() + f.WriteLine(`// GENERATED FILE: DO NOT EDIT!`) + f.WriteLine(``) + f.WriteLine(`package ` + renderer.Package) + f.WriteLine(`// Types used by the API.`) + for _, modelType := range renderer.Model.Types { + f.WriteLine(`// ` + modelType.Description) + if modelType.Kind == surface.TypeKind_STRUCT { + f.WriteLine(`type ` + modelType.TypeName + ` struct {`) + for _, field := range modelType.Fields { + prefix := "" + if field.Kind == surface.FieldKind_REFERENCE { + prefix = "*" + } else if field.Kind == surface.FieldKind_ARRAY { + prefix = "[]" + } else if field.Kind == surface.FieldKind_MAP { + prefix = "map[string]" + } + f.WriteLine(field.FieldName + ` ` + prefix + field.NativeType + jsonTag(field)) + } + f.WriteLine(`}`) + } else if modelType.Kind == surface.TypeKind_OBJECT { + f.WriteLine(`type ` + modelType.TypeName + ` map[string]` + modelType.ContentType) + } else { + f.WriteLine(`type ` + modelType.TypeName + ` struct {}`) + } + } + return f.Bytes(), nil +} + +func jsonTag(field *surface.Field) string { + if field.Serialize { + return " `json:" + `"` + field.Name + `,omitempty"` + "`" + } + return "" +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/renderer.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/renderer.go new file mode 100644 index 000000000..7c9f9d76f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-go-generator/renderer.go @@ -0,0 +1,67 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + _ "os" + "path/filepath" + + plugins "github.com/googleapis/gnostic/plugins" + surface "github.com/googleapis/gnostic/surface" +) + +// Renderer generates code for a surface.Model. +type Renderer struct { + Model *surface.Model + Package string // package name +} + +// NewServiceRenderer creates a renderer. +func NewServiceRenderer(model *surface.Model) (renderer *Renderer, err error) { + renderer = &Renderer{} + renderer.Model = model + return renderer, nil +} + +// Generate runs the renderer to generate the named files. 
+func (renderer *Renderer) Render(response *plugins.Response, files []string) (err error) { + for _, filename := range files { + file := &plugins.File{Name: filename} + switch filename { + case "client.go": + file.Data, err = renderer.RenderClient() + case "types.go": + file.Data, err = renderer.RenderTypes() + case "provider.go": + file.Data, err = renderer.RenderProvider() + case "server.go": + file.Data, err = renderer.RenderServer() + case "constants.go": + file.Data, err = renderer.RenderConstants() + default: + file.Data = nil + } + if err != nil { + response.Errors = append(response.Errors, fmt.Sprintf("ERROR %v", err)) + } + // run generated Go files through goimports + if filepath.Ext(file.Name) == ".go" { + file.Data, err = goimports(file.Name, file.Data) + } + response.Files = append(response.Files, file) + } + return +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-summary/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-summary/README.md new file mode 100644 index 000000000..431b18e99 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-summary/README.md @@ -0,0 +1,8 @@ +# gnostic-summary + +This directory contains a `gnostic` plugin that summarizes the contents of an OpenAPI description. + + gnostic bookstore.json --summary-out=- + +Here the `-` in the output path indicates that results are to be written to stdout. +A `.` will write a summary file into the current directory. diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-summary/main.go b/vendor/github.com/googleapis/gnostic/plugins/gnostic-summary/main.go new file mode 100644 index 000000000..8172da3e2 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-summary/main.go @@ -0,0 +1,111 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gnostic_go_generator is a sample Gnostic plugin that generates Go +// code that supports an API. 
+package main + +import ( + openapi2 "github.com/googleapis/gnostic/OpenAPIv2" + openapi3 "github.com/googleapis/gnostic/OpenAPIv3" + plugins "github.com/googleapis/gnostic/plugins" + "github.com/googleapis/gnostic/printer" +) + +// generate a simple report of an OpenAPI document's contents +func printDocumentV2(code *printer.Code, document *openapi2.Document) { + code.Print("Swagger: %+v", document.Swagger) + code.Print("Host: %+v", document.Host) + code.Print("BasePath: %+v", document.BasePath) + if document.Info != nil { + code.Print("Info:") + code.Indent() + if document.Info.Title != "" { + code.Print("Title: %s", document.Info.Title) + } + if document.Info.Description != "" { + code.Print("Description: %s", document.Info.Description) + } + if document.Info.Version != "" { + code.Print("Version: %s", document.Info.Version) + } + code.Outdent() + } + code.Print("Paths:") + code.Indent() + for _, pair := range document.Paths.Path { + v := pair.Value + if v.Get != nil { + code.Print("GET %+v", pair.Name) + } + if v.Post != nil { + code.Print("POST %+v", pair.Name) + } + } + code.Outdent() +} + +// generate a simple report of an OpenAPI document's contents +func printDocumentV3(code *printer.Code, document *openapi3.Document) { + code.Print("OpenAPI: %+v", document.Openapi) + code.Print("Servers: %+v", document.Servers) + if document.Info != nil { + code.Print("Info:") + code.Indent() + if document.Info.Title != "" { + code.Print("Title: %s", document.Info.Title) + } + if document.Info.Description != "" { + code.Print("Description: %s", document.Info.Description) + } + if document.Info.Version != "" { + code.Print("Version: %s", document.Info.Version) + } + code.Outdent() + } + code.Print("Paths:") + code.Indent() + for _, pair := range document.Paths.Path { + v := pair.Value + if v.Get != nil { + code.Print("GET %+v", pair.Name) + } + if v.Post != nil { + code.Print("POST %+v", pair.Name) + } + } + code.Outdent() +} + +// This is the main function for the plugin. +func main() { + env, err := plugins.NewEnvironment() + env.RespondAndExitIfError(err) + + code := &printer.Code{} + switch { + case env.Request.Openapi2 != nil: + printDocumentV2(code, env.Request.Openapi2) + case env.Request.Openapi3 != nil: + printDocumentV3(code, env.Request.Openapi3) + default: + } + file := &plugins.File{ + Name: "summary.txt", + Data: []byte(code.String()), + } + env.Response.Files = append(env.Response.Files, file) + + env.RespondAndExit() +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Makefile new file mode 100644 index 000000000..fcd9ea1bc --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Makefile @@ -0,0 +1,11 @@ + +all: + swift build + cp .build/debug/gnostic-swift-generator gnostic-swift-generator + rm -f gnostic-swift-client gnostic-swift-server + ln -s gnostic-swift-generator gnostic-swift-client + ln -s gnostic-swift-generator gnostic-swift-server + +clean: + rm -rf .build Packages + rm -rf gnostic-swift-client gnostic-swift-server gnostic-swift-generator diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Package.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Package.swift new file mode 100644 index 000000000..e7f561b8c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Package.swift @@ -0,0 +1,26 @@ +// Copyright 2017 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import PackageDescription + +let package = Package( + name: "SwiftOpenAPIPlugin", + targets: [ + Target(name: "gnostic-swift-generator", dependencies: ["Gnostic"]), + Target(name: "Gnostic") + ], + dependencies: [ + .Package(url: "https://github.com/apple/swift-protobuf.git", Version(0,9,904)), + ] +) diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/README.md new file mode 100644 index 000000000..afa873980 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/README.md @@ -0,0 +1,15 @@ +# OpenAPI Swift Generator Plugin + +This directory contains an `openapic` plugin that can be used to generate a Swift client library and scaffolding for a Swift server for an API with an OpenAPI description. + +The plugin can be invoked like this: + + openapic bookstore.json --swift_generator_out=Bookstore + +Where `Bookstore` is the name of a directory where the generated code will be written. + +Both client and server code will be generated. + +For example usage, see the [examples/bookstore](examples/bookstore) directory. + +HTTP services are provided by the Kitura library. \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/OpenAPIv2.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/OpenAPIv2.pb.swift new file mode 100644 index 000000000..9001d43eb --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/OpenAPIv2.pb.swift @@ -0,0 +1,8213 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto +// +// For information on using the generated types, please see the documenation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. 
+// Please ensure that your are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public struct Openapi_V2_AdditionalPropertiesItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AdditionalPropertiesItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schema: Openapi_V2_Schema { + get { + if case .schema(let v)? = _storage._oneof {return v} + return Openapi_V2_Schema() + } + set {_uniqueStorage()._oneof = .schema(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = _storage._oneof {return v} + return false + } + set {_uniqueStorage()._oneof = .boolean(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schema(Openapi_V2_Schema) + case boolean(Bool) + + public static func ==(lhs: Openapi_V2_AdditionalPropertiesItem.OneOf_Oneof, rhs: Openapi_V2_AdditionalPropertiesItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schema(let l), .schema(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Schema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schema(v)} + case 2: + if _storage._oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {_storage._oneof = .boolean(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Any: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Any" + + public var value: SwiftProtobuf.Google_Protobuf_Any { + get {return _storage._value ?? 
SwiftProtobuf.Google_Protobuf_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var yaml: String { + get {return _storage._yaml} + set {_uniqueStorage()._yaml = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._value) + case 2: try decoder.decodeSingularStringField(value: &_storage._yaml) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._yaml.isEmpty { + try visitor.visitSingularStringField(value: _storage._yaml, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_ApiKeySecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ApiKeySecurity" + + public var type: String = String() + + public var name: String = String() + + public var `in`: String = String() + + public var description_p: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.type) + case 2: try decoder.decodeSingularStringField(value: &self.name) + case 3: try decoder.decodeSingularStringField(value: &self.`in`) + case 4: try decoder.decodeSingularStringField(value: &self.description_p) + case 5: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.type.isEmpty { + try visitor.visitSingularStringField(value: self.type, fieldNumber: 1) + } + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 2) + } + if !self.`in`.isEmpty { + try visitor.visitSingularStringField(value: self.`in`, fieldNumber: 3) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 4) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 5) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_BasicAuthenticationSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".BasicAuthenticationSecurity" + + public var type: String = String() + + public var description_p: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.type) + case 2: try decoder.decodeSingularStringField(value: &self.description_p) + case 3: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.type.isEmpty { + try visitor.visitSingularStringField(value: self.type, fieldNumber: 1) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 2) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_BodyParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".BodyParameter" + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var schema: Openapi_V2_Schema { + get {return _storage._schema ?? 
Openapi_V2_Schema()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularStringField(value: &_storage._name) + case 3: try decoder.decodeSingularStringField(value: &_storage._in) + case 4: try decoder.decodeSingularBoolField(value: &_storage._required) + case 5: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 2) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 3) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 4) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Contact information for the owners of the API. +public struct Openapi_V2_Contact: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Contact" + + /// The identifying name of the contact person/organization. + public var name: String = String() + + /// The URL pointing to the contact information. + public var url: String = String() + + /// The email address of the contact person/organization. 
+ public var email: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeSingularStringField(value: &self.email) + case 4: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.email.isEmpty { + try visitor.visitSingularStringField(value: self.email, fieldNumber: 3) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Default: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Default" + + public var additionalProperties: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// One or more JSON objects describing the schemas being consumed and produced by the API. 
+public struct Openapi_V2_Definitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Definitions" + + public var additionalProperties: [Openapi_V2_NamedSchema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Document: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Document" + + /// The Swagger version of this document. + public var swagger: String { + get {return _storage._swagger} + set {_uniqueStorage()._swagger = newValue} + } + + public var info: Openapi_V2_Info { + get {return _storage._info ?? Openapi_V2_Info()} + set {_uniqueStorage()._info = newValue} + } + /// Returns true if `info` has been explicitly set. + public var hasInfo: Bool {return _storage._info != nil} + /// Clears the value of `info`. Subsequent reads from it will return its default value. + public mutating func clearInfo() {_storage._info = nil} + + /// The host (name or ip) of the API. Example: 'swagger.io' + public var host: String { + get {return _storage._host} + set {_uniqueStorage()._host = newValue} + } + + /// The base path to the API. Example: '/api'. + public var basePath: String { + get {return _storage._basePath} + set {_uniqueStorage()._basePath = newValue} + } + + /// The transfer protocol of the API. + public var schemes: [String] { + get {return _storage._schemes} + set {_uniqueStorage()._schemes = newValue} + } + + /// A list of MIME types accepted by the API. + public var consumes: [String] { + get {return _storage._consumes} + set {_uniqueStorage()._consumes = newValue} + } + + /// A list of MIME types the API can produce. + public var produces: [String] { + get {return _storage._produces} + set {_uniqueStorage()._produces = newValue} + } + + public var paths: Openapi_V2_Paths { + get {return _storage._paths ?? Openapi_V2_Paths()} + set {_uniqueStorage()._paths = newValue} + } + /// Returns true if `paths` has been explicitly set. + public var hasPaths: Bool {return _storage._paths != nil} + /// Clears the value of `paths`. Subsequent reads from it will return its default value. + public mutating func clearPaths() {_storage._paths = nil} + + public var definitions: Openapi_V2_Definitions { + get {return _storage._definitions ?? Openapi_V2_Definitions()} + set {_uniqueStorage()._definitions = newValue} + } + /// Returns true if `definitions` has been explicitly set. 
+ public var hasDefinitions: Bool {return _storage._definitions != nil} + /// Clears the value of `definitions`. Subsequent reads from it will return its default value. + public mutating func clearDefinitions() {_storage._definitions = nil} + + public var parameters: Openapi_V2_ParameterDefinitions { + get {return _storage._parameters ?? Openapi_V2_ParameterDefinitions()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var responses: Openapi_V2_ResponseDefinitions { + get {return _storage._responses ?? Openapi_V2_ResponseDefinitions()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + public var security: [Openapi_V2_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var securityDefinitions: Openapi_V2_SecurityDefinitions { + get {return _storage._securityDefinitions ?? Openapi_V2_SecurityDefinitions()} + set {_uniqueStorage()._securityDefinitions = newValue} + } + /// Returns true if `securityDefinitions` has been explicitly set. + public var hasSecurityDefinitions: Bool {return _storage._securityDefinitions != nil} + /// Clears the value of `securityDefinitions`. Subsequent reads from it will return its default value. + public mutating func clearSecurityDefinitions() {_storage._securityDefinitions = nil} + + public var tags: [Openapi_V2_Tag] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._swagger) + case 2: try decoder.decodeSingularMessageField(value: &_storage._info) + case 3: try decoder.decodeSingularStringField(value: &_storage._host) + case 4: try decoder.decodeSingularStringField(value: &_storage._basePath) + case 5: try decoder.decodeRepeatedStringField(value: &_storage._schemes) + case 6: try decoder.decodeRepeatedStringField(value: &_storage._consumes) + case 7: try decoder.decodeRepeatedStringField(value: &_storage._produces) + case 8: try decoder.decodeSingularMessageField(value: &_storage._paths) + case 9: try decoder.decodeSingularMessageField(value: &_storage._definitions) + case 10: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 11: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 13: try decoder.decodeSingularMessageField(value: &_storage._securityDefinitions) + case 14: try decoder.decodeRepeatedMessageField(value: &_storage._tags) + case 15: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 16: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._swagger.isEmpty { + try visitor.visitSingularStringField(value: _storage._swagger, fieldNumber: 1) + } + if let v = _storage._info { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if !_storage._host.isEmpty { + try visitor.visitSingularStringField(value: _storage._host, fieldNumber: 3) + } + if !_storage._basePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._basePath, fieldNumber: 4) + } + if !_storage._schemes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._schemes, fieldNumber: 5) + } + if !_storage._consumes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._consumes, fieldNumber: 6) + } + if !_storage._produces.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._produces, fieldNumber: 7) + } + if let v = _storage._paths { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._definitions { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 12) + } + if let v = _storage._securityDefinitions { + try visitor.visitSingularMessageField(value: v, fieldNumber: 13) + } + if !_storage._tags.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._tags, fieldNumber: 14) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 15) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 16) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Examples: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Examples" + + public var additionalProperties: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// information about external documentation +public struct Openapi_V2_ExternalDocs: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExternalDocs" + + public var description_p: String = String() + + public var url: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.description_p) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A deterministic version of a JSON Schema object. +public struct Openapi_V2_FileSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".FileSchema" + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var required: [String] { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? 
Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var example: Openapi_V2_Any { + get {return _storage._example ?? Openapi_V2_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._format) + case 2: try decoder.decodeSingularStringField(value: &_storage._title) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._default) + case 5: try decoder.decodeRepeatedStringField(value: &_storage._required) + case 6: try decoder.decodeSingularStringField(value: &_storage._type) + case 7: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + case 8: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 9: try decoder.decodeSingularMessageField(value: &_storage._example) + case 10: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 1) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._required.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._required, fieldNumber: 5) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 6) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 7) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 10) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_FormDataParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".FormDataParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// allows sending a parameter by name only or with an empty value. + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. 
+ public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._required) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 6: try decoder.decodeSingularStringField(value: &_storage._type) + case 7: try decoder.decodeSingularStringField(value: &_storage._format) + case 8: try decoder.decodeSingularMessageField(value: &_storage._items) + case 9: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 10: try decoder.decodeSingularMessageField(value: &_storage._default) + case 11: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 12: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 13: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 14: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 16: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 17: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 19: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 20: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 21: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 22: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 23: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 5) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 6) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 7) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 9) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 11) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 12) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 13) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 14) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 15) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 16) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 17) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 18) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 19) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 20) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 21) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 22) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 23) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Header: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Header" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? 
Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
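The generated comment above refers to the convenience decoding initializer and serializer that SwiftProtobuf adds to every generated Message. A minimal round-trip sketch, assuming this generated file and the SwiftProtobuf package are compiled into the same target; the field values are purely illustrative:

    import Foundation
    import SwiftProtobuf

    func headerRoundTrip() throws {
        var header = Openapi_V2_Header()
        header.type = "string"
        header.format = "uuid"
        header.description_p = "Correlation id returned with every response."

        // Binary protobuf encoding, then decode it back with init(serializedData:).
        let data = try header.serializedData()
        let decoded = try Openapi_V2_Header(serializedData: data)
        assert(decoded.type == "string" && decoded.format == "uuid")
    }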
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._format) + case 3: try decoder.decodeSingularMessageField(value: &_storage._items) + case 4: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 5: try decoder.decodeSingularMessageField(value: &_storage._default) + case 6: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 7: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 8: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 9: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 10: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 11: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 12: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 13: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 15: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 16: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 17: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 18: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 19: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 2) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 4) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 6) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 7) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 8) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 9) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 10) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 11) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 12) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 13) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 14) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 15) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 16) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 17) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 18) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 19) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_HeaderParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".HeaderParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. 
+ public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
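For optional message-typed fields such as `default`, the generated getter substitutes an empty message when nothing was set, so `hasDefault`/`clearDefault` are what distinguish "explicitly set" from "defaulted". A short sketch under the same assumptions as the snippet above, with illustrative values:

    func defaultFieldSketch() {
        var parameter = Openapi_V2_HeaderParameterSubSchema()
        assert(parameter.hasDefault == false)

        // Reading the field never traps; it just returns an empty Openapi_V2_Any
        // and leaves the field unset.
        _ = parameter.`default`
        assert(parameter.hasDefault == false)

        parameter.`default` = Openapi_V2_Any()   // now explicitly set
        assert(parameter.hasDefault == true)

        parameter.clearDefault()                 // back to the unset state
        assert(parameter.hasDefault == false)
    }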
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._required) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularStringField(value: &_storage._type) + case 6: try decoder.decodeSingularStringField(value: &_storage._format) + case 7: try decoder.decodeSingularMessageField(value: &_storage._items) + case 8: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 9: try decoder.decodeSingularMessageField(value: &_storage._default) + case 10: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 11: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 12: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 13: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 16: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 17: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 19: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 20: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 21: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 22: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 5) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 6) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 8) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 10) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 11) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 12) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 13) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 14) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 15) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 16) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 17) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 18) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 19) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 20) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 21) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 22) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Headers: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Headers" + + public var additionalProperties: [Openapi_V2_NamedHeader] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// General information about the API. +public struct Openapi_V2_Info: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Info" + + /// A unique and precise title of the API. + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + /// A semantic version number of the API. + public var version: String { + get {return _storage._version} + set {_uniqueStorage()._version = newValue} + } + + /// A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The terms of service for the API. + public var termsOfService: String { + get {return _storage._termsOfService} + set {_uniqueStorage()._termsOfService = newValue} + } + + public var contact: Openapi_V2_Contact { + get {return _storage._contact ?? Openapi_V2_Contact()} + set {_uniqueStorage()._contact = newValue} + } + /// Returns true if `contact` has been explicitly set. + public var hasContact: Bool {return _storage._contact != nil} + /// Clears the value of `contact`. Subsequent reads from it will return its default value. + public mutating func clearContact() {_storage._contact = nil} + + public var license: Openapi_V2_License { + get {return _storage._license ?? Openapi_V2_License()} + set {_uniqueStorage()._license = newValue} + } + /// Returns true if `license` has been explicitly set. + public var hasLicense: Bool {return _storage._license != nil} + /// Clears the value of `license`. Subsequent reads from it will return its default value. + public mutating func clearLicense() {_storage._license = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
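Map-like data (vendor extensions, headers, responses, and so on) is modeled as repeated `Named*` pairs rather than Swift dictionaries, which preserves the original ordering (see the `Named*` messages below). A sketch of populating `Openapi_V2_Info` this way, with illustrative values and the same assumptions as the earlier snippets:

    func infoSketch() -> Openapi_V2_Info {
        var info = Openapi_V2_Info()
        info.title = "Bookstore"
        info.version = "1.0.0"

        // Vendor extensions are ordered (name, value) pairs; the Any payload is
        // left in its default state here.
        var ext = Openapi_V2_NamedAny()
        ext.name = "x-generated-by"
        info.vendorExtension = [ext]

        assert(info.hasContact == false)   // `contact` was never set
        return info
    }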
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._title) + case 2: try decoder.decodeSingularStringField(value: &_storage._version) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._termsOfService) + case 5: try decoder.decodeSingularMessageField(value: &_storage._contact) + case 6: try decoder.decodeSingularMessageField(value: &_storage._license) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 1) + } + if !_storage._version.isEmpty { + try visitor.visitSingularStringField(value: _storage._version, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._termsOfService.isEmpty { + try visitor.visitSingularStringField(value: _storage._termsOfService, fieldNumber: 4) + } + if let v = _storage._contact { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._license { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_ItemsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ItemsItem" + + public var schema: [Openapi_V2_Schema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.schema) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.schema.isEmpty { + try visitor.visitRepeatedMessageField(value: self.schema, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_JsonReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".JsonReference" + + public var ref: String = String() + + public var description_p: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + case 2: try decoder.decodeSingularStringField(value: &self.description_p) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_License: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".License" + + /// The name of the license type. It's encouraged to use an OSI compatible license. + public var name: String = String() + + /// The URL pointing to the license. + public var url: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +public struct Openapi_V2_NamedAny: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedAny" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Any { + get {return _storage._value ?? Openapi_V2_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +public struct Openapi_V2_NamedHeader: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedHeader" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Header { + get {return _storage._value ?? Openapi_V2_Header()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +public struct Openapi_V2_NamedParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedParameter" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Parameter { + get {return _storage._value ?? Openapi_V2_Parameter()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +public struct Openapi_V2_NamedPathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedPathItem" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_PathItem { + get {return _storage._value ?? Openapi_V2_PathItem()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +public struct Openapi_V2_NamedResponse: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResponse" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Response { + get {return _storage._value ?? Openapi_V2_Response()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. +public struct Openapi_V2_NamedResponseValue: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResponseValue" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_ResponseValue { + get {return _storage._value ?? Openapi_V2_ResponseValue()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +public struct Openapi_V2_NamedSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSchema" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Schema { + get {return _storage._value ?? Openapi_V2_Schema()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. +public struct Openapi_V2_NamedSecurityDefinitionsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSecurityDefinitionsItem" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_SecurityDefinitionsItem { + get {return _storage._value ?? Openapi_V2_SecurityDefinitionsItem()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. 
Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +public struct Openapi_V2_NamedString: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedString" + + /// Map key + public var name: String = String() + + /// Mapped value + public var value: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.value.isEmpty { + try visitor.visitSingularStringField(value: self.value, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. 
+public struct Openapi_V2_NamedStringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedStringArray" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_StringArray { + get {return _storage._value ?? Openapi_V2_StringArray()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_NonBodyParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NonBodyParameter" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var headerParameterSubSchema: Openapi_V2_HeaderParameterSubSchema { + get { + if case .headerParameterSubSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_HeaderParameterSubSchema() + } + set {_uniqueStorage()._oneof = .headerParameterSubSchema(newValue)} + } + + public var formDataParameterSubSchema: Openapi_V2_FormDataParameterSubSchema { + get { + if case .formDataParameterSubSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_FormDataParameterSubSchema() + } + set {_uniqueStorage()._oneof = .formDataParameterSubSchema(newValue)} + } + + public var queryParameterSubSchema: Openapi_V2_QueryParameterSubSchema { + get { + if case .queryParameterSubSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_QueryParameterSubSchema() + } + set {_uniqueStorage()._oneof = .queryParameterSubSchema(newValue)} + } + + public var pathParameterSubSchema: Openapi_V2_PathParameterSubSchema { + get { + if case .pathParameterSubSchema(let v)? 
= _storage._oneof {return v} + return Openapi_V2_PathParameterSubSchema() + } + set {_uniqueStorage()._oneof = .pathParameterSubSchema(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case headerParameterSubSchema(Openapi_V2_HeaderParameterSubSchema) + case formDataParameterSubSchema(Openapi_V2_FormDataParameterSubSchema) + case queryParameterSubSchema(Openapi_V2_QueryParameterSubSchema) + case pathParameterSubSchema(Openapi_V2_PathParameterSubSchema) + + public static func ==(lhs: Openapi_V2_NonBodyParameter.OneOf_Oneof, rhs: Openapi_V2_NonBodyParameter.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.headerParameterSubSchema(let l), .headerParameterSubSchema(let r)): return l == r + case (.formDataParameterSubSchema(let l), .formDataParameterSubSchema(let r)): return l == r + case (.queryParameterSubSchema(let l), .queryParameterSubSchema(let r)): return l == r + case (.pathParameterSubSchema(let l), .pathParameterSubSchema(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_HeaderParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .headerParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .headerParameterSubSchema(v)} + case 2: + var v: Openapi_V2_FormDataParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .formDataParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .formDataParameterSubSchema(v)} + case 3: + var v: Openapi_V2_QueryParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .queryParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .queryParameterSubSchema(v)} + case 4: + var v: Openapi_V2_PathParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .pathParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .pathParameterSubSchema(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
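`Openapi_V2_NonBodyParameter` wraps its four parameter kinds in a single `oneof`; assigning through any of the typed accessors selects that case, and the wrapped value is recovered by pattern-matching on `oneof`. A sketch with illustrative values, under the same assumptions as above:

    func nonBodyParameterSketch() {
        var parameter = Openapi_V2_NonBodyParameter()

        var header = Openapi_V2_HeaderParameterSubSchema()
        header.name = "X-Request-Id"
        header.`in` = "header"
        header.type = "string"
        parameter.headerParameterSubSchema = header   // selects the .headerParameterSubSchema case

        switch parameter.oneof {
        case .headerParameterSubSchema(let h)?:
            print("header parameter:", h.name)
        default:
            print("some other kind of non-body parameter")
        }
    }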
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .headerParameterSubSchema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .formDataParameterSubSchema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case .queryParameterSubSchema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + case .pathParameterSubSchema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2AccessCodeSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2AccessCodeSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var authorizationURL: String { + get {return _storage._authorizationURL} + set {_uniqueStorage()._authorizationURL = newValue} + } + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._authorizationURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 6: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
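+ // Encoding follows proto3 defaults: empty strings, a nil `scopes` message, and an
+ // empty `vendorExtension` list are skipped rather than written to the wire.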
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._authorizationURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._authorizationURL, fieldNumber: 4) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 5) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 6) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2ApplicationSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2ApplicationSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2ImplicitSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2ImplicitSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var authorizationURL: String { + get {return _storage._authorizationURL} + set {_uniqueStorage()._authorizationURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
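+ // Field numbers: 1 = type, 2 = flow, 3 = scopes, 4 = authorizationURL,
+ // 5 = description, 6 = vendorExtension; the implicit flow carries no tokenURL.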
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._authorizationURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._authorizationURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._authorizationURL, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2PasswordSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2PasswordSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2Scopes: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2Scopes" + + public var additionalProperties: [Openapi_V2_NamedString] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
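+ // The OAuth scopes map is flattened into a repeated list of Openapi_V2_NamedString
+ // entries rather than being exposed as a Swift dictionary.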
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Operation: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Operation" + + public var tags: [String] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + /// A brief summary of the operation. + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + /// A longer description of the operation, GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + /// A unique identifier of the operation. + public var operationID: String { + get {return _storage._operationID} + set {_uniqueStorage()._operationID = newValue} + } + + /// A list of MIME types the API can produce. + public var produces: [String] { + get {return _storage._produces} + set {_uniqueStorage()._produces = newValue} + } + + /// A list of MIME types the API can consume. + public var consumes: [String] { + get {return _storage._consumes} + set {_uniqueStorage()._consumes = newValue} + } + + /// The parameters needed to send a valid API call. + public var parameters: [Openapi_V2_ParametersItem] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var responses: Openapi_V2_Responses { + get {return _storage._responses ?? Openapi_V2_Responses()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + /// The transfer protocol of the API. + public var schemes: [String] { + get {return _storage._schemes} + set {_uniqueStorage()._schemes = newValue} + } + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var security: [Openapi_V2_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
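+ // Illustrative sketch of constructing and encoding an Operation; `serializedData()`
+ // is the SwiftProtobuf helper referenced in the comments above, and the values are
+ // placeholders:
+ //
+ //     var op = Openapi_V2_Operation()
+ //     op.operationID = "listPets"            // hypothetical operation id
+ //     op.produces = ["application/json"]
+ //     let bytes = try op.serializedData()    // binary-encodes via traverse(visitor:)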
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &_storage._tags) + case 2: try decoder.decodeSingularStringField(value: &_storage._summary) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 5: try decoder.decodeSingularStringField(value: &_storage._operationID) + case 6: try decoder.decodeRepeatedStringField(value: &_storage._produces) + case 7: try decoder.decodeRepeatedStringField(value: &_storage._consumes) + case 8: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 9: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 10: try decoder.decodeRepeatedStringField(value: &_storage._schemes) + case 11: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 13: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._tags.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._tags, fieldNumber: 1) + } + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._operationID.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationID, fieldNumber: 5) + } + if !_storage._produces.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._produces, fieldNumber: 6) + } + if !_storage._consumes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._consumes, fieldNumber: 7) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 8) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._schemes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._schemes, fieldNumber: 10) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 11) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 12) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 13) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + public var oneof: 
OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var bodyParameter: Openapi_V2_BodyParameter { + get { + if case .bodyParameter(let v)? = _storage._oneof {return v} + return Openapi_V2_BodyParameter() + } + set {_uniqueStorage()._oneof = .bodyParameter(newValue)} + } + + public var nonBodyParameter: Openapi_V2_NonBodyParameter { + get { + if case .nonBodyParameter(let v)? = _storage._oneof {return v} + return Openapi_V2_NonBodyParameter() + } + set {_uniqueStorage()._oneof = .nonBodyParameter(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case bodyParameter(Openapi_V2_BodyParameter) + case nonBodyParameter(Openapi_V2_NonBodyParameter) + + public static func ==(lhs: Openapi_V2_Parameter.OneOf_Oneof, rhs: Openapi_V2_Parameter.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.bodyParameter(let l), .bodyParameter(let r)): return l == r + case (.nonBodyParameter(let l), .nonBodyParameter(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_BodyParameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .bodyParameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .bodyParameter(v)} + case 2: + var v: Openapi_V2_NonBodyParameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .nonBodyParameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .nonBodyParameter(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .bodyParameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .nonBodyParameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// One or more JSON representations for parameters +public struct Openapi_V2_ParameterDefinitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParameterDefinitions" + + public var additionalProperties: [Openapi_V2_NamedParameter] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_ParametersItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParametersItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var parameter: Openapi_V2_Parameter { + get { + if case .parameter(let v)? = _storage._oneof {return v} + return Openapi_V2_Parameter() + } + set {_uniqueStorage()._oneof = .parameter(newValue)} + } + + public var jsonReference: Openapi_V2_JsonReference { + get { + if case .jsonReference(let v)? = _storage._oneof {return v} + return Openapi_V2_JsonReference() + } + set {_uniqueStorage()._oneof = .jsonReference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case parameter(Openapi_V2_Parameter) + case jsonReference(Openapi_V2_JsonReference) + + public static func ==(lhs: Openapi_V2_ParametersItem.OneOf_Oneof, rhs: Openapi_V2_ParametersItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.parameter(let l), .parameter(let r)): return l == r + case (.jsonReference(let l), .jsonReference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Parameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .parameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .parameter(v)} + case 2: + var v: Openapi_V2_JsonReference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .jsonReference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .jsonReference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .parameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .jsonReference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_PathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PathItem" + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var get: Openapi_V2_Operation { + get {return _storage._get ?? Openapi_V2_Operation()} + set {_uniqueStorage()._get = newValue} + } + /// Returns true if `get` has been explicitly set. + public var hasGet: Bool {return _storage._get != nil} + /// Clears the value of `get`. Subsequent reads from it will return its default value. + public mutating func clearGet() {_storage._get = nil} + + public var put: Openapi_V2_Operation { + get {return _storage._put ?? Openapi_V2_Operation()} + set {_uniqueStorage()._put = newValue} + } + /// Returns true if `put` has been explicitly set. + public var hasPut: Bool {return _storage._put != nil} + /// Clears the value of `put`. Subsequent reads from it will return its default value. + public mutating func clearPut() {_storage._put = nil} + + public var post: Openapi_V2_Operation { + get {return _storage._post ?? Openapi_V2_Operation()} + set {_uniqueStorage()._post = newValue} + } + /// Returns true if `post` has been explicitly set. + public var hasPost: Bool {return _storage._post != nil} + /// Clears the value of `post`. Subsequent reads from it will return its default value. + public mutating func clearPost() {_storage._post = nil} + + public var delete: Openapi_V2_Operation { + get {return _storage._delete ?? Openapi_V2_Operation()} + set {_uniqueStorage()._delete = newValue} + } + /// Returns true if `delete` has been explicitly set. + public var hasDelete: Bool {return _storage._delete != nil} + /// Clears the value of `delete`. Subsequent reads from it will return its default value. + public mutating func clearDelete() {_storage._delete = nil} + + public var options: Openapi_V2_Operation { + get {return _storage._options ?? Openapi_V2_Operation()} + set {_uniqueStorage()._options = newValue} + } + /// Returns true if `options` has been explicitly set. + public var hasOptions: Bool {return _storage._options != nil} + /// Clears the value of `options`. Subsequent reads from it will return its default value. + public mutating func clearOptions() {_storage._options = nil} + + public var head: Openapi_V2_Operation { + get {return _storage._head ?? Openapi_V2_Operation()} + set {_uniqueStorage()._head = newValue} + } + /// Returns true if `head` has been explicitly set. + public var hasHead: Bool {return _storage._head != nil} + /// Clears the value of `head`. Subsequent reads from it will return its default value. + public mutating func clearHead() {_storage._head = nil} + + public var patch: Openapi_V2_Operation { + get {return _storage._patch ?? Openapi_V2_Operation()} + set {_uniqueStorage()._patch = newValue} + } + /// Returns true if `patch` has been explicitly set. + public var hasPatch: Bool {return _storage._patch != nil} + /// Clears the value of `patch`. 
Subsequent reads from it will return its default value. + public mutating func clearPatch() {_storage._patch = nil} + + /// The parameters needed to send a valid API call. + public var parameters: [Openapi_V2_ParametersItem] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._ref) + case 2: try decoder.decodeSingularMessageField(value: &_storage._get) + case 3: try decoder.decodeSingularMessageField(value: &_storage._put) + case 4: try decoder.decodeSingularMessageField(value: &_storage._post) + case 5: try decoder.decodeSingularMessageField(value: &_storage._delete) + case 6: try decoder.decodeSingularMessageField(value: &_storage._options) + case 7: try decoder.decodeSingularMessageField(value: &_storage._head) + case 8: try decoder.decodeSingularMessageField(value: &_storage._patch) + case 9: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 10: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
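+ // Only operations that were explicitly set (hasGet, hasPut, ...) are encoded, at
+ // field numbers 2 through 8; `ref` occupies field 1.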
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 1) + } + if let v = _storage._get { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._put { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._post { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._delete { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._options { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._head { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._patch { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 9) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 10) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_PathParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PathParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
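+ // Fields 1-9 describe the parameter itself; fields 10-21 carry the JSON-Schema-style
+ // validation keywords (maximum/minimum, length, items, pattern, enum, multipleOf);
+ // field 22 holds vendor extensions.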
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._required) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularStringField(value: &_storage._type) + case 6: try decoder.decodeSingularStringField(value: &_storage._format) + case 7: try decoder.decodeSingularMessageField(value: &_storage._items) + case 8: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 9: try decoder.decodeSingularMessageField(value: &_storage._default) + case 10: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 11: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 12: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 13: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 16: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 17: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 19: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 20: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 21: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 22: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
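+ // Numeric and boolean constraints are serialized only when they differ from their
+ // proto3 defaults, so `required = false` or `maximum = 0` is indistinguishable from
+ // "not set" on the wire.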
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 5) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 6) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 8) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 10) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 11) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 12) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 13) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 14) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 15) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 16) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 17) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 18) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 19) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 20) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 21) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 22) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Relative paths to the individual endpoints. They must be relative to the 'basePath'. +public struct Openapi_V2_Paths: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Paths" + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var path: [Openapi_V2_NamedPathItem] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + case 2: try decoder.decodeRepeatedMessageField(value: &self.path) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitRepeatedMessageField(value: self.path, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_PrimitivesItems: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PrimitivesItems" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
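+ // PrimitivesItems is recursive: its `items` field is itself an optional
+ // PrimitivesItems, which allows nested array item definitions.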
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._format) + case 3: try decoder.decodeSingularMessageField(value: &_storage._items) + case 4: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 5: try decoder.decodeSingularMessageField(value: &_storage._default) + case 6: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 7: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 8: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 9: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 10: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 11: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 12: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 13: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 15: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 16: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 17: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 18: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 2) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 4) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 6) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 7) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 8) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 9) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 10) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 11) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 12) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 13) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 14) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 15) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 16) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 17) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 18) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Properties: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Properties" + + public var additionalProperties: [Openapi_V2_NamedSchema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
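+ // Like ParameterDefinitions and Oauth2Scopes, Properties flattens a JSON map into a
+ // repeated list of named pairs, here Openapi_V2_NamedSchema.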
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_QueryParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".QueryParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// allows sending a parameter by name only or with an empty value. + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._required) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 6: try decoder.decodeSingularStringField(value: &_storage._type) + case 7: try decoder.decodeSingularStringField(value: &_storage._format) + case 8: try decoder.decodeSingularMessageField(value: &_storage._items) + case 9: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 10: try decoder.decodeSingularMessageField(value: &_storage._default) + case 11: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 12: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 13: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 14: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 16: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 17: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 19: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 20: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 21: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 22: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 23: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 5) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 6) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 7) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 9) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 11) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 12) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 13) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 14) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 15) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 16) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 17) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 18) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 19) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 20) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 21) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 22) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 23) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var schema: Openapi_V2_SchemaItem { + get {return _storage._schema ?? Openapi_V2_SchemaItem()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. 
+ public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var headers: Openapi_V2_Headers { + get {return _storage._headers ?? Openapi_V2_Headers()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. + public mutating func clearHeaders() {_storage._headers = nil} + + public var examples: Openapi_V2_Examples { + get {return _storage._examples ?? Openapi_V2_Examples()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 3: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 4: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// One or more JSON representations for parameters +public struct Openapi_V2_ResponseDefinitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponseDefinitions" + + public var additionalProperties: [Openapi_V2_NamedResponse] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_ResponseValue: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponseValue" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var response: Openapi_V2_Response { + get { + if case .response(let v)? = _storage._oneof {return v} + return Openapi_V2_Response() + } + set {_uniqueStorage()._oneof = .response(newValue)} + } + + public var jsonReference: Openapi_V2_JsonReference { + get { + if case .jsonReference(let v)? 
= _storage._oneof {return v} + return Openapi_V2_JsonReference() + } + set {_uniqueStorage()._oneof = .jsonReference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case response(Openapi_V2_Response) + case jsonReference(Openapi_V2_JsonReference) + + public static func ==(lhs: Openapi_V2_ResponseValue.OneOf_Oneof, rhs: Openapi_V2_ResponseValue.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.response(let l), .response(let r)): return l == r + case (.jsonReference(let l), .jsonReference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Response? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .response(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .response(v)} + case 2: + var v: Openapi_V2_JsonReference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .jsonReference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .jsonReference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .response(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .jsonReference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Response objects names can either be any valid HTTP status code or 'default'. +public struct Openapi_V2_Responses: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Responses" + + public var responseCode: [Openapi_V2_NamedResponseValue] = [] + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.responseCode) + case 2: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.responseCode.isEmpty { + try visitor.visitRepeatedMessageField(value: self.responseCode, fieldNumber: 1) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A deterministic version of a JSON Schema object. +public struct Openapi_V2_Schema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schema" + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var maxProperties: Int64 { + get {return _storage._maxProperties} + set {_uniqueStorage()._maxProperties = newValue} + } + + public var minProperties: Int64 { + get {return _storage._minProperties} + set {_uniqueStorage()._minProperties = newValue} + } + + public var required: [String] { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var additionalProperties: Openapi_V2_AdditionalPropertiesItem { + get {return _storage._additionalProperties ?? Openapi_V2_AdditionalPropertiesItem()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. + public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var type: Openapi_V2_TypeItem { + get {return _storage._type ?? Openapi_V2_TypeItem()} + set {_uniqueStorage()._type = newValue} + } + /// Returns true if `type` has been explicitly set. + public var hasType: Bool {return _storage._type != nil} + /// Clears the value of `type`. Subsequent reads from it will return its default value. + public mutating func clearType() {_storage._type = nil} + + public var items: Openapi_V2_ItemsItem { + get {return _storage._items ?? Openapi_V2_ItemsItem()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var allOf: [Openapi_V2_Schema] { + get {return _storage._allOf} + set {_uniqueStorage()._allOf = newValue} + } + + public var properties: Openapi_V2_Properties { + get {return _storage._properties ?? 
Openapi_V2_Properties()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var discriminator: String { + get {return _storage._discriminator} + set {_uniqueStorage()._discriminator = newValue} + } + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var xml: Openapi_V2_Xml { + get {return _storage._xml ?? Openapi_V2_Xml()} + set {_uniqueStorage()._xml = newValue} + } + /// Returns true if `xml` has been explicitly set. + public var hasXml: Bool {return _storage._xml != nil} + /// Clears the value of `xml`. Subsequent reads from it will return its default value. + public mutating func clearXml() {_storage._xml = nil} + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var example: Openapi_V2_Any { + get {return _storage._example ?? Openapi_V2_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._ref) + case 2: try decoder.decodeSingularStringField(value: &_storage._format) + case 3: try decoder.decodeSingularStringField(value: &_storage._title) + case 4: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 5: try decoder.decodeSingularMessageField(value: &_storage._default) + case 6: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 7: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 8: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 9: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 10: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 11: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 12: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 13: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 16: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 17: try decoder.decodeSingularInt64Field(value: &_storage._maxProperties) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._minProperties) + case 19: try decoder.decodeRepeatedStringField(value: &_storage._required) + case 20: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 21: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 22: try decoder.decodeSingularMessageField(value: &_storage._type) + case 23: try decoder.decodeSingularMessageField(value: &_storage._items) + case 24: try decoder.decodeRepeatedMessageField(value: &_storage._allOf) + case 25: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 26: try decoder.decodeSingularStringField(value: &_storage._discriminator) + case 27: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + case 28: try decoder.decodeSingularMessageField(value: &_storage._xml) + case 29: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 30: try decoder.decodeSingularMessageField(value: &_storage._example) + case 31: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 1) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 2) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 3) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 4) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 6) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 7) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 8) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 9) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 10) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 11) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 12) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 13) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 14) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 15) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 16) + } + if _storage._maxProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxProperties, fieldNumber: 17) + } + if _storage._minProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._minProperties, fieldNumber: 18) + } + if !_storage._required.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._required, fieldNumber: 19) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 20) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 21) + } + if let v = _storage._type { + try visitor.visitSingularMessageField(value: v, fieldNumber: 22) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 23) + } + if !_storage._allOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._allOf, fieldNumber: 24) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 25) + } + if !_storage._discriminator.isEmpty { + try visitor.visitSingularStringField(value: _storage._discriminator, fieldNumber: 26) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 27) + } + if let v = _storage._xml { + try visitor.visitSingularMessageField(value: v, fieldNumber: 28) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 29) + } + if let v = 
_storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 30) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 31) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_SchemaItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SchemaItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schema: Openapi_V2_Schema { + get { + if case .schema(let v)? = _storage._oneof {return v} + return Openapi_V2_Schema() + } + set {_uniqueStorage()._oneof = .schema(newValue)} + } + + public var fileSchema: Openapi_V2_FileSchema { + get { + if case .fileSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_FileSchema() + } + set {_uniqueStorage()._oneof = .fileSchema(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schema(Openapi_V2_Schema) + case fileSchema(Openapi_V2_FileSchema) + + public static func ==(lhs: Openapi_V2_SchemaItem.OneOf_Oneof, rhs: Openapi_V2_SchemaItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schema(let l), .schema(let r)): return l == r + case (.fileSchema(let l), .fileSchema(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Schema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schema(v)} + case 2: + var v: Openapi_V2_FileSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .fileSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .fileSchema(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .fileSchema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_SecurityDefinitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityDefinitions" + + public var additionalProperties: [Openapi_V2_NamedSecurityDefinitionsItem] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_SecurityDefinitionsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityDefinitionsItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var basicAuthenticationSecurity: Openapi_V2_BasicAuthenticationSecurity { + get { + if case .basicAuthenticationSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_BasicAuthenticationSecurity() + } + set {_uniqueStorage()._oneof = .basicAuthenticationSecurity(newValue)} + } + + public var apiKeySecurity: Openapi_V2_ApiKeySecurity { + get { + if case .apiKeySecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_ApiKeySecurity() + } + set {_uniqueStorage()._oneof = .apiKeySecurity(newValue)} + } + + public var oauth2ImplicitSecurity: Openapi_V2_Oauth2ImplicitSecurity { + get { + if case .oauth2ImplicitSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_Oauth2ImplicitSecurity() + } + set {_uniqueStorage()._oneof = .oauth2ImplicitSecurity(newValue)} + } + + public var oauth2PasswordSecurity: Openapi_V2_Oauth2PasswordSecurity { + get { + if case .oauth2PasswordSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_Oauth2PasswordSecurity() + } + set {_uniqueStorage()._oneof = .oauth2PasswordSecurity(newValue)} + } + + public var oauth2ApplicationSecurity: Openapi_V2_Oauth2ApplicationSecurity { + get { + if case .oauth2ApplicationSecurity(let v)? 
= _storage._oneof {return v} + return Openapi_V2_Oauth2ApplicationSecurity() + } + set {_uniqueStorage()._oneof = .oauth2ApplicationSecurity(newValue)} + } + + public var oauth2AccessCodeSecurity: Openapi_V2_Oauth2AccessCodeSecurity { + get { + if case .oauth2AccessCodeSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_Oauth2AccessCodeSecurity() + } + set {_uniqueStorage()._oneof = .oauth2AccessCodeSecurity(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case basicAuthenticationSecurity(Openapi_V2_BasicAuthenticationSecurity) + case apiKeySecurity(Openapi_V2_ApiKeySecurity) + case oauth2ImplicitSecurity(Openapi_V2_Oauth2ImplicitSecurity) + case oauth2PasswordSecurity(Openapi_V2_Oauth2PasswordSecurity) + case oauth2ApplicationSecurity(Openapi_V2_Oauth2ApplicationSecurity) + case oauth2AccessCodeSecurity(Openapi_V2_Oauth2AccessCodeSecurity) + + public static func ==(lhs: Openapi_V2_SecurityDefinitionsItem.OneOf_Oneof, rhs: Openapi_V2_SecurityDefinitionsItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.basicAuthenticationSecurity(let l), .basicAuthenticationSecurity(let r)): return l == r + case (.apiKeySecurity(let l), .apiKeySecurity(let r)): return l == r + case (.oauth2ImplicitSecurity(let l), .oauth2ImplicitSecurity(let r)): return l == r + case (.oauth2PasswordSecurity(let l), .oauth2PasswordSecurity(let r)): return l == r + case (.oauth2ApplicationSecurity(let l), .oauth2ApplicationSecurity(let r)): return l == r + case (.oauth2AccessCodeSecurity(let l), .oauth2AccessCodeSecurity(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_BasicAuthenticationSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .basicAuthenticationSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .basicAuthenticationSecurity(v)} + case 2: + var v: Openapi_V2_ApiKeySecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .apiKeySecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .apiKeySecurity(v)} + case 3: + var v: Openapi_V2_Oauth2ImplicitSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2ImplicitSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2ImplicitSecurity(v)} + case 4: + var v: Openapi_V2_Oauth2PasswordSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2PasswordSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2PasswordSecurity(v)} + case 5: + var v: Openapi_V2_Oauth2ApplicationSecurity? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2ApplicationSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2ApplicationSecurity(v)} + case 6: + var v: Openapi_V2_Oauth2AccessCodeSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2AccessCodeSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2AccessCodeSecurity(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .basicAuthenticationSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .apiKeySecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case .oauth2ImplicitSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + case .oauth2PasswordSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + case .oauth2ApplicationSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + case .oauth2AccessCodeSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_SecurityRequirement: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityRequirement" + + public var additionalProperties: [Openapi_V2_NamedStringArray] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_StringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".StringArray" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.value.isEmpty { + try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Tag: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Tag" + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_TypeItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".TypeItem" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.value.isEmpty { + try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Any property starting with x- is valid. +public struct Openapi_V2_VendorExtension: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".VendorExtension" + + public var additionalProperties: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Openapi_V2_Xml: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Xml"
+
+  public var name: String = String()
+
+  public var namespace: String = String()
+
+  public var prefix: String = String()
+
+  public var attribute: Bool = false
+
+  public var wrapped: Bool = false
+
+  public var vendorExtension: [Openapi_V2_NamedAny] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeSingularStringField(value: &self.name)
+      case 2: try decoder.decodeSingularStringField(value: &self.namespace)
+      case 3: try decoder.decodeSingularStringField(value: &self.prefix)
+      case 4: try decoder.decodeSingularBoolField(value: &self.attribute)
+      case 5: try decoder.decodeSingularBoolField(value: &self.wrapped)
+      case 6: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.name.isEmpty {
+      try visitor.visitSingularStringField(value: self.name, fieldNumber: 1)
+    }
+    if !self.namespace.isEmpty {
+      try visitor.visitSingularStringField(value: self.namespace, fieldNumber: 2)
+    }
+    if !self.prefix.isEmpty {
+      try visitor.visitSingularStringField(value: self.prefix, fieldNumber: 3)
+    }
+    if self.attribute != false {
+      try visitor.visitSingularBoolField(value: self.attribute, fieldNumber: 4)
+    }
+    if self.wrapped != false {
+      try visitor.visitSingularBoolField(value: self.wrapped, fieldNumber: 5)
+    }
+    if !self.vendorExtension.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 6)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+// MARK: - Code below here is support for the SwiftProtobuf runtime.
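Reviewer note (not part of the generated file): the message structs above are used through SwiftProtobuf's standard Message API mentioned in their doc comments (`serializedData()`, `init(serializedData:)`, `jsonUTF8Data()`). The following is a minimal usage sketch, assuming SwiftProtobuf is linked and this generated source is in the same module; the helper name `roundTripResponseExample` is illustrative only.

import Foundation
import SwiftProtobuf

func roundTripResponseExample() throws {
    // Build a small schema; setters go through the copy-on-write _StorageClass.
    var schema = Openapi_V2_Schema()
    schema.title = "Pet"
    schema.type.value = ["object"]   // accessing `type` materializes the TypeItem message
    schema.required = ["name"]

    // Wrap it in the SchemaItem oneof, which is what Response.schema expects.
    var item = Openapi_V2_SchemaItem()
    item.schema = schema             // selects the .schema case of OneOf_Oneof

    var response = Openapi_V2_Response()
    response.description_p = "A single pet."
    response.schema = item

    // Binary round trip via the Message extensions referenced in the doc comments.
    let binary = try response.serializedData()
    let decoded = try Openapi_V2_Response(serializedData: binary)
    assert(decoded.hasSchema && decoded.schema.schema.title == "Pet")
}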
+ +fileprivate let _protobuf_package = "openapi.v2" + +extension Openapi_V2_AdditionalPropertiesItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .same(proto: "boolean"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_AdditionalPropertiesItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_AdditionalPropertiesItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Any: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + 2: .same(proto: "yaml"), + ] + + fileprivate class _StorageClass { + var _value: SwiftProtobuf.Google_Protobuf_Any? = nil + var _yaml: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _value = source._value + _yaml = source._yaml + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Any) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._value != other_storage._value {return false} + if _storage._yaml != other_storage._yaml {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ApiKeySecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "name"), + 3: .same(proto: "in"), + 4: .same(proto: "description"), + 5: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ApiKeySecurity) -> Bool { + if self.type != other.type {return false} + if self.name != other.name {return false} + if self.`in` != other.`in` {return false} + if self.description_p != other.description_p {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_BasicAuthenticationSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "description"), + 3: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_BasicAuthenticationSecurity) -> Bool { + 
if self.type != other.type {return false} + if self.description_p != other.description_p {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_BodyParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "name"), + 3: .same(proto: "in"), + 4: .same(proto: "required"), + 5: .same(proto: "schema"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _name: String = String() + var _in: String = String() + var _required: Bool = false + var _schema: Openapi_V2_Schema? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _name = source._name + _in = source._in + _required = source._required + _schema = source._schema + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_BodyParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._in != other_storage._in {return false} + if _storage._required != other_storage._required {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Contact: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .same(proto: "email"), + 4: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Contact) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.email != other.email {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Default: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Default) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Definitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func 
_protobuf_generated_isEqualTo(other: Openapi_V2_Definitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Document: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "swagger"), + 2: .same(proto: "info"), + 3: .same(proto: "host"), + 4: .standard(proto: "base_path"), + 5: .same(proto: "schemes"), + 6: .same(proto: "consumes"), + 7: .same(proto: "produces"), + 8: .same(proto: "paths"), + 9: .same(proto: "definitions"), + 10: .same(proto: "parameters"), + 11: .same(proto: "responses"), + 12: .same(proto: "security"), + 13: .standard(proto: "security_definitions"), + 14: .same(proto: "tags"), + 15: .standard(proto: "external_docs"), + 16: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _swagger: String = String() + var _info: Openapi_V2_Info? = nil + var _host: String = String() + var _basePath: String = String() + var _schemes: [String] = [] + var _consumes: [String] = [] + var _produces: [String] = [] + var _paths: Openapi_V2_Paths? = nil + var _definitions: Openapi_V2_Definitions? = nil + var _parameters: Openapi_V2_ParameterDefinitions? = nil + var _responses: Openapi_V2_ResponseDefinitions? = nil + var _security: [Openapi_V2_SecurityRequirement] = [] + var _securityDefinitions: Openapi_V2_SecurityDefinitions? = nil + var _tags: [Openapi_V2_Tag] = [] + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _swagger = source._swagger + _info = source._info + _host = source._host + _basePath = source._basePath + _schemes = source._schemes + _consumes = source._consumes + _produces = source._produces + _paths = source._paths + _definitions = source._definitions + _parameters = source._parameters + _responses = source._responses + _security = source._security + _securityDefinitions = source._securityDefinitions + _tags = source._tags + _externalDocs = source._externalDocs + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Document) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._swagger != other_storage._swagger {return false} + if _storage._info != other_storage._info {return false} + if _storage._host != other_storage._host {return false} + if _storage._basePath != other_storage._basePath {return false} + if _storage._schemes != other_storage._schemes {return false} + if _storage._consumes != other_storage._consumes {return false} + if _storage._produces != other_storage._produces {return false} + if _storage._paths != other_storage._paths {return false} + if _storage._definitions != other_storage._definitions {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._security != other_storage._security {return false} + if _storage._securityDefinitions != 
other_storage._securityDefinitions {return false} + if _storage._tags != other_storage._tags {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Examples: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Examples) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ExternalDocs: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "url"), + 3: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ExternalDocs) -> Bool { + if self.description_p != other.description_p {return false} + if self.url != other.url {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_FileSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "format"), + 2: .same(proto: "title"), + 3: .same(proto: "description"), + 4: .same(proto: "default"), + 5: .same(proto: "required"), + 6: .same(proto: "type"), + 7: .standard(proto: "read_only"), + 8: .standard(proto: "external_docs"), + 9: .same(proto: "example"), + 10: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _format: String = String() + var _title: String = String() + var _description_p: String = String() + var _default: Openapi_V2_Any? = nil + var _required: [String] = [] + var _type: String = String() + var _readOnly: Bool = false + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _example: Openapi_V2_Any? 
= nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _format = source._format + _title = source._title + _description_p = source._description_p + _default = source._default + _required = source._required + _type = source._type + _readOnly = source._readOnly + _externalDocs = source._externalDocs + _example = source._example + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_FileSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._format != other_storage._format {return false} + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._required != other_storage._required {return false} + if _storage._type != other_storage._type {return false} + if _storage._readOnly != other_storage._readOnly {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._example != other_storage._example {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_FormDataParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .standard(proto: "allow_empty_value"), + 6: .same(proto: "type"), + 7: .same(proto: "format"), + 8: .same(proto: "items"), + 9: .standard(proto: "collection_format"), + 10: .same(proto: "default"), + 11: .same(proto: "maximum"), + 12: .standard(proto: "exclusive_maximum"), + 13: .same(proto: "minimum"), + 14: .standard(proto: "exclusive_minimum"), + 15: .standard(proto: "max_length"), + 16: .standard(proto: "min_length"), + 17: .same(proto: "pattern"), + 18: .standard(proto: "max_items"), + 19: .standard(proto: "min_items"), + 20: .standard(proto: "unique_items"), + 21: .same(proto: "enum"), + 22: .standard(proto: "multiple_of"), + 23: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _allowEmptyValue: Bool = false + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _allowEmptyValue = source._allowEmptyValue + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_FormDataParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Header: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + 
public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "format"), + 3: .same(proto: "items"), + 4: .standard(proto: "collection_format"), + 5: .same(proto: "default"), + 6: .same(proto: "maximum"), + 7: .standard(proto: "exclusive_maximum"), + 8: .same(proto: "minimum"), + 9: .standard(proto: "exclusive_minimum"), + 10: .standard(proto: "max_length"), + 11: .standard(proto: "min_length"), + 12: .same(proto: "pattern"), + 13: .standard(proto: "max_items"), + 14: .standard(proto: "min_items"), + 15: .standard(proto: "unique_items"), + 16: .same(proto: "enum"), + 17: .standard(proto: "multiple_of"), + 18: .same(proto: "description"), + 19: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? = nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Header) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return 
false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_HeaderParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .same(proto: "type"), + 6: .same(proto: "format"), + 7: .same(proto: "items"), + 8: .standard(proto: "collection_format"), + 9: .same(proto: "default"), + 10: .same(proto: "maximum"), + 11: .standard(proto: "exclusive_maximum"), + 12: .same(proto: "minimum"), + 13: .standard(proto: "exclusive_minimum"), + 14: .standard(proto: "max_length"), + 15: .standard(proto: "min_length"), + 16: .same(proto: "pattern"), + 17: .standard(proto: "max_items"), + 18: .standard(proto: "min_items"), + 19: .standard(proto: "unique_items"), + 20: .same(proto: "enum"), + 21: .standard(proto: "multiple_of"), + 22: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_HeaderParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Headers: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public 
func _protobuf_generated_isEqualTo(other: Openapi_V2_Headers) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Info: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "title"), + 2: .same(proto: "version"), + 3: .same(proto: "description"), + 4: .standard(proto: "terms_of_service"), + 5: .same(proto: "contact"), + 6: .same(proto: "license"), + 7: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _title: String = String() + var _version: String = String() + var _description_p: String = String() + var _termsOfService: String = String() + var _contact: Openapi_V2_Contact? = nil + var _license: Openapi_V2_License? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _title = source._title + _version = source._version + _description_p = source._description_p + _termsOfService = source._termsOfService + _contact = source._contact + _license = source._license + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Info) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._title != other_storage._title {return false} + if _storage._version != other_storage._version {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._termsOfService != other_storage._termsOfService {return false} + if _storage._contact != other_storage._contact {return false} + if _storage._license != other_storage._license {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ItemsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ItemsItem) -> Bool { + if self.schema != other.schema {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_JsonReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "description"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_JsonReference) -> Bool { + if self.ref != other.ref {return false} + if self.description_p != other.description_p {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_License: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .standard(proto: 
"vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_License) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedAny: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Any? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedAny) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedHeader: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Header? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedHeader) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Parameter? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedPathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_PathItem? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedPathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedResponse: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Response? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedResponse) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedResponseValue: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_ResponseValue? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedResponseValue) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Schema? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedSecurityDefinitionsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_SecurityDefinitionsItem? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedSecurityDefinitionsItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedString: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedString) -> Bool { + if self.name != other.name {return false} + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedStringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_StringArray? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedStringArray) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NonBodyParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "header_parameter_sub_schema"), + 2: .standard(proto: "form_data_parameter_sub_schema"), + 3: .standard(proto: "query_parameter_sub_schema"), + 4: .standard(proto: "path_parameter_sub_schema"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_NonBodyParameter.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NonBodyParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2AccessCodeSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "authorization_url"), + 5: .standard(proto: "token_url"), + 6: .same(proto: "description"), + 7: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _authorizationURL: String = String() + var _tokenURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _authorizationURL = source._authorizationURL + _tokenURL = source._tokenURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2AccessCodeSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._authorizationURL != other_storage._authorizationURL {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2ApplicationSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "token_url"), + 5: .same(proto: "description"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _tokenURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _tokenURL = source._tokenURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2ApplicationSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2ImplicitSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "authorization_url"), + 5: .same(proto: "description"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _authorizationURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _authorizationURL = source._authorizationURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2ImplicitSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._authorizationURL != other_storage._authorizationURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2PasswordSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "token_url"), + 5: .same(proto: "description"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _tokenURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _tokenURL = source._tokenURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2PasswordSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2Scopes: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2Scopes) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Operation: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "tags"), + 2: .same(proto: "summary"), + 3: .same(proto: "description"), + 4: .standard(proto: "external_docs"), + 5: .standard(proto: "operation_id"), + 6: .same(proto: "produces"), + 7: .same(proto: "consumes"), + 8: .same(proto: "parameters"), + 9: .same(proto: "responses"), + 10: .same(proto: "schemes"), + 11: .same(proto: "deprecated"), + 12: .same(proto: "security"), + 13: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _tags: [String] = [] + var _summary: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _operationID: String = String() + var _produces: [String] = [] + var _consumes: [String] = [] + var _parameters: [Openapi_V2_ParametersItem] = [] + var _responses: Openapi_V2_Responses? 
= nil + var _schemes: [String] = [] + var _deprecated: Bool = false + var _security: [Openapi_V2_SecurityRequirement] = [] + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _tags = source._tags + _summary = source._summary + _description_p = source._description_p + _externalDocs = source._externalDocs + _operationID = source._operationID + _produces = source._produces + _consumes = source._consumes + _parameters = source._parameters + _responses = source._responses + _schemes = source._schemes + _deprecated = source._deprecated + _security = source._security + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Operation) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._tags != other_storage._tags {return false} + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._operationID != other_storage._operationID {return false} + if _storage._produces != other_storage._produces {return false} + if _storage._consumes != other_storage._consumes {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._schemes != other_storage._schemes {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._security != other_storage._security {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "body_parameter"), + 2: .standard(proto: "non_body_parameter"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_Parameter.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Parameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ParameterDefinitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ParameterDefinitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ParametersItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "parameter"), + 2: .standard(proto: "json_reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_ParametersItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ParametersItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_PathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "get"), + 3: .same(proto: "put"), + 4: .same(proto: "post"), + 5: .same(proto: "delete"), + 6: .same(proto: "options"), + 7: .same(proto: "head"), + 8: .same(proto: "patch"), + 9: .same(proto: "parameters"), + 10: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _ref: String = String() + var _get: Openapi_V2_Operation? = nil + var _put: Openapi_V2_Operation? = nil + var _post: Openapi_V2_Operation? = nil + var _delete: Openapi_V2_Operation? = nil + var _options: Openapi_V2_Operation? = nil + var _head: Openapi_V2_Operation? = nil + var _patch: Openapi_V2_Operation? 
= nil + var _parameters: [Openapi_V2_ParametersItem] = [] + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _ref = source._ref + _get = source._get + _put = source._put + _post = source._post + _delete = source._delete + _options = source._options + _head = source._head + _patch = source._patch + _parameters = source._parameters + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_PathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._ref != other_storage._ref {return false} + if _storage._get != other_storage._get {return false} + if _storage._put != other_storage._put {return false} + if _storage._post != other_storage._post {return false} + if _storage._delete != other_storage._delete {return false} + if _storage._options != other_storage._options {return false} + if _storage._head != other_storage._head {return false} + if _storage._patch != other_storage._patch {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_PathParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .same(proto: "type"), + 6: .same(proto: "format"), + 7: .same(proto: "items"), + 8: .standard(proto: "collection_format"), + 9: .same(proto: "default"), + 10: .same(proto: "maximum"), + 11: .standard(proto: "exclusive_maximum"), + 12: .same(proto: "minimum"), + 13: .standard(proto: "exclusive_minimum"), + 14: .standard(proto: "max_length"), + 15: .standard(proto: "min_length"), + 16: .same(proto: "pattern"), + 17: .standard(proto: "max_items"), + 18: .standard(proto: "min_items"), + 19: .standard(proto: "unique_items"), + 20: .same(proto: "enum"), + 21: .standard(proto: "multiple_of"), + 22: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_PathParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Paths: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "vendor_extension"), + 2: .same(proto: 
"path"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Paths) -> Bool { + if self.vendorExtension != other.vendorExtension {return false} + if self.path != other.path {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_PrimitivesItems: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "format"), + 3: .same(proto: "items"), + 4: .standard(proto: "collection_format"), + 5: .same(proto: "default"), + 6: .same(proto: "maximum"), + 7: .standard(proto: "exclusive_maximum"), + 8: .same(proto: "minimum"), + 9: .standard(proto: "exclusive_minimum"), + 10: .standard(proto: "max_length"), + 11: .standard(proto: "min_length"), + 12: .same(proto: "pattern"), + 13: .standard(proto: "max_items"), + 14: .standard(proto: "min_items"), + 15: .standard(proto: "unique_items"), + 16: .same(proto: "enum"), + 17: .standard(proto: "multiple_of"), + 18: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? = nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_PrimitivesItems) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != 
other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Properties: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Properties) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_QueryParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .standard(proto: "allow_empty_value"), + 6: .same(proto: "type"), + 7: .same(proto: "format"), + 8: .same(proto: "items"), + 9: .standard(proto: "collection_format"), + 10: .same(proto: "default"), + 11: .same(proto: "maximum"), + 12: .standard(proto: "exclusive_maximum"), + 13: .same(proto: "minimum"), + 14: .standard(proto: "exclusive_minimum"), + 15: .standard(proto: "max_length"), + 16: .standard(proto: "min_length"), + 17: .same(proto: "pattern"), + 18: .standard(proto: "max_items"), + 19: .standard(proto: "min_items"), + 20: .standard(proto: "unique_items"), + 21: .same(proto: "enum"), + 22: .standard(proto: "multiple_of"), + 23: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _allowEmptyValue: Bool = false + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _allowEmptyValue = source._allowEmptyValue + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_QueryParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + 
public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "schema"), + 3: .same(proto: "headers"), + 4: .same(proto: "examples"), + 5: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _schema: Openapi_V2_SchemaItem? = nil + var _headers: Openapi_V2_Headers? = nil + var _examples: Openapi_V2_Examples? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _schema = source._schema + _headers = source._headers + _examples = source._examples + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Response) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ResponseDefinitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ResponseDefinitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ResponseValue: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "response"), + 2: .standard(proto: "json_reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_ResponseValue.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ResponseValue) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Responses: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "response_code"), + 2: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Responses) -> Bool { + if self.responseCode != other.responseCode {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Schema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "format"), + 3: .same(proto: "title"), + 4: .same(proto: "description"), + 5: .same(proto: "default"), + 6: .standard(proto: "multiple_of"), + 7: .same(proto: "maximum"), + 8: .standard(proto: "exclusive_maximum"), + 9: .same(proto: "minimum"), + 10: .standard(proto: "exclusive_minimum"), + 11: .standard(proto: "max_length"), + 12: .standard(proto: "min_length"), + 13: .same(proto: "pattern"), + 14: .standard(proto: "max_items"), + 15: .standard(proto: "min_items"), + 16: .standard(proto: "unique_items"), + 17: .standard(proto: "max_properties"), + 18: .standard(proto: "min_properties"), + 19: .same(proto: "required"), + 20: .same(proto: "enum"), + 21: .standard(proto: "additional_properties"), + 22: .same(proto: "type"), + 23: .same(proto: "items"), + 24: .standard(proto: "all_of"), + 25: .same(proto: "properties"), + 26: .same(proto: "discriminator"), + 27: .standard(proto: "read_only"), + 28: .same(proto: "xml"), + 29: .standard(proto: "external_docs"), + 30: .same(proto: "example"), + 31: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _ref: String = String() + var _format: String = String() + var _title: String = String() + var _description_p: String = String() + var _default: Openapi_V2_Any? = nil + var _multipleOf: Double = 0 + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _maxProperties: Int64 = 0 + var _minProperties: Int64 = 0 + var _required: [String] = [] + var _enum: [Openapi_V2_Any] = [] + var _additionalProperties: Openapi_V2_AdditionalPropertiesItem? = nil + var _type: Openapi_V2_TypeItem? = nil + var _items: Openapi_V2_ItemsItem? = nil + var _allOf: [Openapi_V2_Schema] = [] + var _properties: Openapi_V2_Properties? 
= nil + var _discriminator: String = String() + var _readOnly: Bool = false + var _xml: Openapi_V2_Xml? = nil + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _example: Openapi_V2_Any? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _ref = source._ref + _format = source._format + _title = source._title + _description_p = source._description_p + _default = source._default + _multipleOf = source._multipleOf + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _maxProperties = source._maxProperties + _minProperties = source._minProperties + _required = source._required + _enum = source._enum + _additionalProperties = source._additionalProperties + _type = source._type + _items = source._items + _allOf = source._allOf + _properties = source._properties + _discriminator = source._discriminator + _readOnly = source._readOnly + _xml = source._xml + _externalDocs = source._externalDocs + _example = source._example + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Schema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._ref != other_storage._ref {return false} + if _storage._format != other_storage._format {return false} + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._maxProperties != other_storage._maxProperties {return false} + if _storage._minProperties != other_storage._minProperties {return false} + if _storage._required != other_storage._required {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._type != other_storage._type {return false} + if _storage._items != other_storage._items {return false} + if _storage._allOf != other_storage._allOf {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._discriminator != 
other_storage._discriminator {return false} + if _storage._readOnly != other_storage._readOnly {return false} + if _storage._xml != other_storage._xml {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._example != other_storage._example {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SchemaItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .standard(proto: "file_schema"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_SchemaItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SchemaItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SecurityDefinitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SecurityDefinitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SecurityDefinitionsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "basic_authentication_security"), + 2: .standard(proto: "api_key_security"), + 3: .standard(proto: "oauth2_implicit_security"), + 4: .standard(proto: "oauth2_password_security"), + 5: .standard(proto: "oauth2_application_security"), + 6: .standard(proto: "oauth2_access_code_security"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_SecurityDefinitionsItem.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SecurityDefinitionsItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SecurityRequirement: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SecurityRequirement) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_StringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_StringArray) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Tag: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "description"), + 3: .standard(proto: "external_docs"), + 4: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V2_ExternalDocs? 
= nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _description_p = source._description_p + _externalDocs = source._externalDocs + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Tag) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_TypeItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_TypeItem) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_VendorExtension: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_VendorExtension) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Xml: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "namespace"), + 3: .same(proto: "prefix"), + 4: .same(proto: "attribute"), + 5: .same(proto: "wrapped"), + 6: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Xml) -> Bool { + if self.name != other.name {return false} + if self.namespace != other.namespace {return false} + if self.prefix != other.prefix {return false} + if self.attribute != other.attribute {return false} + if self.wrapped != other.wrapped {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/OpenAPIv3.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/OpenAPIv3.pb.swift new file mode 100644 index 000000000..0f0f71403 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/OpenAPIv3.pb.swift @@ -0,0 +1,8849 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. 
+// Source: github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.proto +// +// For information on using the generated types, please see the documenation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that your are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public struct Openapi_V3_AdditionalPropertiesItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AdditionalPropertiesItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schemaOrReference: Openapi_V3_SchemaOrReference { + get { + if case .schemaOrReference(let v)? = _storage._oneof {return v} + return Openapi_V3_SchemaOrReference() + } + set {_uniqueStorage()._oneof = .schemaOrReference(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = _storage._oneof {return v} + return false + } + set {_uniqueStorage()._oneof = .boolean(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schemaOrReference(Openapi_V3_SchemaOrReference) + case boolean(Bool) + + public static func ==(lhs: Openapi_V3_AdditionalPropertiesItem.OneOf_Oneof, rhs: Openapi_V3_AdditionalPropertiesItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schemaOrReference(let l), .schemaOrReference(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_SchemaOrReference? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schemaOrReference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schemaOrReference(v)} + case 2: + if _storage._oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {_storage._oneof = .boolean(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schemaOrReference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Any: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Any" + + public var value: SwiftProtobuf.Google_Protobuf_Any { + get {return _storage._value ?? SwiftProtobuf.Google_Protobuf_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var yaml: String { + get {return _storage._yaml} + set {_uniqueStorage()._yaml = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._value) + case 2: try decoder.decodeSingularStringField(value: &_storage._yaml) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._yaml.isEmpty { + try visitor.visitSingularStringField(value: _storage._yaml, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_AnyOrExpression: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AnyOrExpression" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var any: Openapi_V3_Any { + get { + if case .any(let v)? = _storage._oneof {return v} + return Openapi_V3_Any() + } + set {_uniqueStorage()._oneof = .any(newValue)} + } + + public var expression: Openapi_V3_Expression { + get { + if case .expression(let v)? = _storage._oneof {return v} + return Openapi_V3_Expression() + } + set {_uniqueStorage()._oneof = .expression(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case any(Openapi_V3_Any) + case expression(Openapi_V3_Expression) + + public static func ==(lhs: Openapi_V3_AnyOrExpression.OneOf_Oneof, rhs: Openapi_V3_AnyOrExpression.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.any(let l), .any(let r)): return l == r + case (.expression(let l), .expression(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Any? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .any(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .any(v)} + case 2: + var v: Openapi_V3_Expression? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .expression(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .expression(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .any(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .expression(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_AnysOrExpressions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AnysOrExpressions" + + public var additionalProperties: [Openapi_V3_NamedAnyOrExpression] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation. +public struct Openapi_V3_Callback: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Callback" + + public var path: [Openapi_V3_NamedPathItem] = [] + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.path) + case 2: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
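The generated "…OrReference"/"…OrExpression" wrapper messages above all follow the same pattern: a `oneof` enum plus one typed accessor per case, where reading an accessor for a case that is not currently set returns that type's default instance. The following is an illustrative sketch only, not part of the vendored diff; it assumes the generated Gnostic module and SwiftProtobuf are available to link against.

// Illustrative only: how the generated oneof wrappers above behave.
// Assumes the generated Openapi_V3_* types and SwiftProtobuf are importable.
import SwiftProtobuf

func anyOrExpressionSketch() {
    var item = Openapi_V3_AnyOrExpression()

    // Assigning through a typed accessor selects the corresponding oneof case.
    var any = Openapi_V3_Any()
    any.yaml = "example: value"
    item.any = any

    // Reading the accessor for a case that is not set returns a default instance.
    let unsetExpression = item.expression   // Openapi_V3_Expression()
    _ = unsetExpression

    // The underlying case can be inspected directly via the oneof enum.
    switch item.oneof {
    case .any(let v)?:
        print("carries an Any with yaml:", v.yaml)
    case .expression(_)?:
        print("carries an Expression")
    case nil:
        print("no case set")
    }
}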
+ public func traverse(visitor: inout V) throws { + if !self.path.isEmpty { + try visitor.visitRepeatedMessageField(value: self.path, fieldNumber: 1) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_CallbackOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".CallbackOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var callback: Openapi_V3_Callback { + get { + if case .callback(let v)? = _storage._oneof {return v} + return Openapi_V3_Callback() + } + set {_uniqueStorage()._oneof = .callback(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case callback(Openapi_V3_Callback) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_CallbackOrReference.OneOf_Oneof, rhs: Openapi_V3_CallbackOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.callback(let l), .callback(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Callback? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .callback(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .callback(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .callback(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_CallbacksOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".CallbacksOrReferences" + + public var additionalProperties: [Openapi_V3_NamedCallbackOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object. +public struct Openapi_V3_Components: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Components" + + public var schemas: Openapi_V3_SchemasOrReferences { + get {return _storage._schemas ?? Openapi_V3_SchemasOrReferences()} + set {_uniqueStorage()._schemas = newValue} + } + /// Returns true if `schemas` has been explicitly set. + public var hasSchemas: Bool {return _storage._schemas != nil} + /// Clears the value of `schemas`. Subsequent reads from it will return its default value. + public mutating func clearSchemas() {_storage._schemas = nil} + + public var responses: Openapi_V3_ResponsesOrReferences { + get {return _storage._responses ?? Openapi_V3_ResponsesOrReferences()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + public var parameters: Openapi_V3_ParametersOrReferences { + get {return _storage._parameters ?? Openapi_V3_ParametersOrReferences()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. 
Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var requestBodies: Openapi_V3_RequestBodiesOrReferences { + get {return _storage._requestBodies ?? Openapi_V3_RequestBodiesOrReferences()} + set {_uniqueStorage()._requestBodies = newValue} + } + /// Returns true if `requestBodies` has been explicitly set. + public var hasRequestBodies: Bool {return _storage._requestBodies != nil} + /// Clears the value of `requestBodies`. Subsequent reads from it will return its default value. + public mutating func clearRequestBodies() {_storage._requestBodies = nil} + + public var headers: Openapi_V3_HeadersOrReferences { + get {return _storage._headers ?? Openapi_V3_HeadersOrReferences()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. + public mutating func clearHeaders() {_storage._headers = nil} + + public var securitySchemes: Openapi_V3_SecuritySchemesOrReferences { + get {return _storage._securitySchemes ?? Openapi_V3_SecuritySchemesOrReferences()} + set {_uniqueStorage()._securitySchemes = newValue} + } + /// Returns true if `securitySchemes` has been explicitly set. + public var hasSecuritySchemes: Bool {return _storage._securitySchemes != nil} + /// Clears the value of `securitySchemes`. Subsequent reads from it will return its default value. + public mutating func clearSecuritySchemes() {_storage._securitySchemes = nil} + + public var links: Openapi_V3_LinksOrReferences { + get {return _storage._links ?? Openapi_V3_LinksOrReferences()} + set {_uniqueStorage()._links = newValue} + } + /// Returns true if `links` has been explicitly set. + public var hasLinks: Bool {return _storage._links != nil} + /// Clears the value of `links`. Subsequent reads from it will return its default value. + public mutating func clearLinks() {_storage._links = nil} + + public var callbacks: Openapi_V3_CallbacksOrReferences { + get {return _storage._callbacks ?? Openapi_V3_CallbacksOrReferences()} + set {_uniqueStorage()._callbacks = newValue} + } + /// Returns true if `callbacks` has been explicitly set. + public var hasCallbacks: Bool {return _storage._callbacks != nil} + /// Clears the value of `callbacks`. Subsequent reads from it will return its default value. + public mutating func clearCallbacks() {_storage._callbacks = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
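Because each sub-map of `Openapi_V3_Components` is held as optional storage, the generated `has*` and `clear*` accessors are the way to distinguish "explicitly set" from "defaulted". A minimal sketch follows, again illustrative only and assuming the generated module is available; it is not part of the vendored diff.

// Illustrative only: explicit-presence checks on the generated Components message.
func componentsSketch() {
    var components = Openapi_V3_Components()

    // Reading an unset field returns an empty default instance, so use the
    // generated has* accessor to test for explicit presence.
    print(components.hasSchemas)            // false
    _ = components.schemas                  // Openapi_V3_SchemasOrReferences(); getter does not set the field
    print(components.hasSchemas)            // still false

    components.schemas = Openapi_V3_SchemasOrReferences()
    print(components.hasSchemas)            // true

    components.clearSchemas()
    print(components.hasSchemas)            // false again
}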
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._schemas) + case 2: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 3: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 4: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 5: try decoder.decodeSingularMessageField(value: &_storage._requestBodies) + case 6: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 7: try decoder.decodeSingularMessageField(value: &_storage._securitySchemes) + case 8: try decoder.decodeSingularMessageField(value: &_storage._links) + case 9: try decoder.decodeSingularMessageField(value: &_storage._callbacks) + case 10: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._schemas { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._requestBodies { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._securitySchemes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._links { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._callbacks { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 10) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Contact information for the exposed API. +public struct Openapi_V3_Contact: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Contact" + + public var name: String = String() + + public var url: String = String() + + public var email: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeSingularStringField(value: &self.email) + case 4: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.email.isEmpty { + try visitor.visitSingularStringField(value: self.email, fieldNumber: 3) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_DefaultType: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".DefaultType" + + public var oneof: Openapi_V3_DefaultType.OneOf_Oneof? = nil + + public var number: Double { + get { + if case .number(let v)? = oneof {return v} + return 0 + } + set {oneof = .number(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = oneof {return v} + return false + } + set {oneof = .boolean(newValue)} + } + + public var string: String { + get { + if case .string(let v)? = oneof {return v} + return String() + } + set {oneof = .string(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case number(Double) + case boolean(Bool) + case string(String) + + public static func ==(lhs: Openapi_V3_DefaultType.OneOf_Oneof, rhs: Openapi_V3_DefaultType.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.number(let l), .number(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + case (.string(let l), .string(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Double? + try decoder.decodeSingularDoubleField(value: &v) + if let v = v {self.oneof = .number(v)} + case 2: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {self.oneof = .boolean(v)} + case 3: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: String? 
+ try decoder.decodeSingularStringField(value: &v) + if let v = v {self.oneof = .string(v)} + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + switch self.oneof { + case .number(let v)?: + try visitor.visitSingularDoubleField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case .string(let v)?: + try visitor.visitSingularStringField(value: v, fieldNumber: 3) + case nil: break + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered. +public struct Openapi_V3_Discriminator: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Discriminator" + + public var propertyName: String { + get {return _storage._propertyName} + set {_uniqueStorage()._propertyName = newValue} + } + + public var mapping: Openapi_V3_Strings { + get {return _storage._mapping ?? Openapi_V3_Strings()} + set {_uniqueStorage()._mapping = newValue} + } + /// Returns true if `mapping` has been explicitly set. + public var hasMapping: Bool {return _storage._mapping != nil} + /// Clears the value of `mapping`. Subsequent reads from it will return its default value. + public mutating func clearMapping() {_storage._mapping = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._propertyName) + case 2: try decoder.decodeSingularMessageField(value: &_storage._mapping) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
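// Usage sketch (illustrative, not part of the generated file): message-typed fields
// follow the explicit-presence pattern described in the comments above, so a
// Discriminator's optional `mapping` (an Openapi_V3_Strings defined elsewhere in this
// file) can be tested and reset without sentinel values.
func discriminatorPresenceDemo() {
  var disc = Openapi_V3_Discriminator()
  disc.propertyName = "petType"
  print(disc.hasMapping)               // false: `mapping` was never assigned
  disc.mapping = Openapi_V3_Strings()  // assigning through the accessor marks it as set
  print(disc.hasMapping)               // true
  disc.clearMapping()                  // unset again; reads now return the default value
}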
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._propertyName.isEmpty { + try visitor.visitSingularStringField(value: _storage._propertyName, fieldNumber: 1) + } + if let v = _storage._mapping { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Document: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Document" + + public var openapi: String { + get {return _storage._openapi} + set {_uniqueStorage()._openapi = newValue} + } + + public var info: Openapi_V3_Info { + get {return _storage._info ?? Openapi_V3_Info()} + set {_uniqueStorage()._info = newValue} + } + /// Returns true if `info` has been explicitly set. + public var hasInfo: Bool {return _storage._info != nil} + /// Clears the value of `info`. Subsequent reads from it will return its default value. + public mutating func clearInfo() {_storage._info = nil} + + public var servers: [Openapi_V3_Server] { + get {return _storage._servers} + set {_uniqueStorage()._servers = newValue} + } + + public var paths: Openapi_V3_Paths { + get {return _storage._paths ?? Openapi_V3_Paths()} + set {_uniqueStorage()._paths = newValue} + } + /// Returns true if `paths` has been explicitly set. + public var hasPaths: Bool {return _storage._paths != nil} + /// Clears the value of `paths`. Subsequent reads from it will return its default value. + public mutating func clearPaths() {_storage._paths = nil} + + public var components: Openapi_V3_Components { + get {return _storage._components ?? Openapi_V3_Components()} + set {_uniqueStorage()._components = newValue} + } + /// Returns true if `components` has been explicitly set. + public var hasComponents: Bool {return _storage._components != nil} + /// Clears the value of `components`. Subsequent reads from it will return its default value. + public mutating func clearComponents() {_storage._components = nil} + + public var security: [Openapi_V3_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var tags: [Openapi_V3_Tag] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._openapi) + case 2: try decoder.decodeSingularMessageField(value: &_storage._info) + case 3: try decoder.decodeRepeatedMessageField(value: &_storage._servers) + case 4: try decoder.decodeSingularMessageField(value: &_storage._paths) + case 5: try decoder.decodeSingularMessageField(value: &_storage._components) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._tags) + case 8: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 9: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._openapi.isEmpty { + try visitor.visitSingularStringField(value: _storage._openapi, fieldNumber: 1) + } + if let v = _storage._info { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if !_storage._servers.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._servers, fieldNumber: 3) + } + if let v = _storage._paths { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._components { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 6) + } + if !_storage._tags.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._tags, fieldNumber: 7) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 9) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// A single encoding definition applied to a single schema property. +public struct Openapi_V3_Encoding: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Encoding" + + public var contentType: String { + get {return _storage._contentType} + set {_uniqueStorage()._contentType = newValue} + } + + public var headers: Openapi_V3_HeadersOrReferences { + get {return _storage._headers ?? Openapi_V3_HeadersOrReferences()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. 
+ public mutating func clearHeaders() {_storage._headers = nil} + + public var style: String { + get {return _storage._style} + set {_uniqueStorage()._style = newValue} + } + + public var explode: Bool { + get {return _storage._explode} + set {_uniqueStorage()._explode = newValue} + } + + public var allowReserved: Bool { + get {return _storage._allowReserved} + set {_uniqueStorage()._allowReserved = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._contentType) + case 2: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 3: try decoder.decodeSingularStringField(value: &_storage._style) + case 4: try decoder.decodeSingularBoolField(value: &_storage._explode) + case 5: try decoder.decodeSingularBoolField(value: &_storage._allowReserved) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._contentType.isEmpty { + try visitor.visitSingularStringField(value: _storage._contentType, fieldNumber: 1) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if !_storage._style.isEmpty { + try visitor.visitSingularStringField(value: _storage._style, fieldNumber: 3) + } + if _storage._explode != false { + try visitor.visitSingularBoolField(value: _storage._explode, fieldNumber: 4) + } + if _storage._allowReserved != false { + try visitor.visitSingularBoolField(value: _storage._allowReserved, fieldNumber: 5) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Encodings: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Encodings" + + public var additionalProperties: [Openapi_V3_NamedEncoding] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
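// Usage sketch (illustrative, not part of the generated file): scalar fields keep
// proto3 default semantics, so an Encoding left at its defaults serializes to zero
// bytes, mirroring the `isEmpty` / `!= false` guards in the traverse method above.
func encodingDefaultsDemo() throws {
  var enc = Openapi_V3_Encoding()
  print(try enc.serializedData().count)   // 0: nothing explicitly set
  enc.contentType = "application/x-www-form-urlencoded"
  enc.explode = true
  print(try enc.serializedData().count)   // > 0: fields 1 and 4 are now written
}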
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Example: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Example" + + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var value: Openapi_V3_Any { + get {return _storage._value ?? Openapi_V3_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var externalValue: String { + get {return _storage._externalValue} + set {_uniqueStorage()._externalValue = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._summary) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._value) + case 4: try decoder.decodeSingularStringField(value: &_storage._externalValue) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._externalValue.isEmpty { + try visitor.visitSingularStringField(value: _storage._externalValue, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ExampleOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExampleOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var example: Openapi_V3_Example { + get { + if case .example(let v)? = _storage._oneof {return v} + return Openapi_V3_Example() + } + set {_uniqueStorage()._oneof = .example(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case example(Openapi_V3_Example) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_ExampleOrReference.OneOf_Oneof, rhs: Openapi_V3_ExampleOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.example(let l), .example(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Example? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .example(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .example(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .example(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Examples: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Examples" + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let _ = try decoder.nextFieldNumber() { + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_ExamplesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExamplesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedExampleOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Expression: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Expression" + + public var additionalProperties: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
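// Usage sketch (illustrative, not part of the generated file): every *OrReference
// wrapper pairs a oneof with typed accessors, so callers can branch on whether a
// decoded component holds an inline value or a reference.
func exampleOrReferenceDemo() throws {
  var example = Openapi_V3_Example()
  example.summary = "A minimal pet"
  var wrapper = Openapi_V3_ExampleOrReference()
  wrapper.example = example                 // selects the .example case
  let decoded = try Openapi_V3_ExampleOrReference(serializedData: wrapper.serializedData())
  switch decoded.oneof {
  case .example(let e)?:   print("inline example:", e.summary)
  case .reference(_)?:     print("a reference to a shared component")
  case nil:                print("neither alternative was present")
  }
}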
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Allows referencing an external resource for extended documentation. +public struct Openapi_V3_ExternalDocs: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExternalDocs" + + public var description_p: String = String() + + public var url: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.description_p) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`). 
+public struct Openapi_V3_Header: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Header" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var style: String { + get {return _storage._style} + set {_uniqueStorage()._style = newValue} + } + + public var explode: Bool { + get {return _storage._explode} + set {_uniqueStorage()._explode = newValue} + } + + public var allowReserved: Bool { + get {return _storage._allowReserved} + set {_uniqueStorage()._allowReserved = newValue} + } + + public var schema: Openapi_V3_SchemaOrReference { + get {return _storage._schema ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularBoolField(value: &_storage._required) + case 3: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 4: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 5: try decoder.decodeSingularStringField(value: &_storage._style) + case 6: try decoder.decodeSingularBoolField(value: &_storage._explode) + case 7: try decoder.decodeSingularBoolField(value: &_storage._allowReserved) + case 8: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 9: try decoder.decodeSingularMessageField(value: &_storage._example) + case 10: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 11: try decoder.decodeSingularMessageField(value: &_storage._content) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 2) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 3) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 4) + } + if !_storage._style.isEmpty { + try visitor.visitSingularStringField(value: _storage._style, fieldNumber: 5) + } + if _storage._explode != false { + try visitor.visitSingularBoolField(value: _storage._explode, fieldNumber: 6) + } + if _storage._allowReserved != false { + try visitor.visitSingularBoolField(value: _storage._allowReserved, fieldNumber: 7) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 12) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_HeaderOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".HeaderOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var header: Openapi_V3_Header { + get { + if case .header(let v)? 
= _storage._oneof {return v} + return Openapi_V3_Header() + } + set {_uniqueStorage()._oneof = .header(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case header(Openapi_V3_Header) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_HeaderOrReference.OneOf_Oneof, rhs: Openapi_V3_HeaderOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.header(let l), .header(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Header? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .header(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .header(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .header(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_HeadersOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".HeadersOrReferences" + + public var additionalProperties: [Openapi_V3_NamedHeaderOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
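// Usage sketch (illustrative, not part of the generated file): per the Header Object
// comment above, `name` and `in` are implied by the surrounding map, so only the
// remaining traits are set before wrapping the header as an inline component.
func rateLimitHeaderDemo() {
  var header = Openapi_V3_Header()
  header.description_p = "Requests remaining in the current window"  // the proto `description` field
  header.style = "simple"
  header.required = false                  // default value, skipped when serialized
  var wrapped = Openapi_V3_HeaderOrReference()
  wrapped.header = header                  // selects the .header case of the oneof
  print(wrapped.header.description_p)
}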
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The object provides metadata about the API. The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience. +public struct Openapi_V3_Info: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Info" + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var termsOfService: String { + get {return _storage._termsOfService} + set {_uniqueStorage()._termsOfService = newValue} + } + + public var contact: Openapi_V3_Contact { + get {return _storage._contact ?? Openapi_V3_Contact()} + set {_uniqueStorage()._contact = newValue} + } + /// Returns true if `contact` has been explicitly set. + public var hasContact: Bool {return _storage._contact != nil} + /// Clears the value of `contact`. Subsequent reads from it will return its default value. + public mutating func clearContact() {_storage._contact = nil} + + public var license: Openapi_V3_License { + get {return _storage._license ?? Openapi_V3_License()} + set {_uniqueStorage()._license = newValue} + } + /// Returns true if `license` has been explicitly set. + public var hasLicense: Bool {return _storage._license != nil} + /// Clears the value of `license`. Subsequent reads from it will return its default value. + public mutating func clearLicense() {_storage._license = nil} + + public var version: String { + get {return _storage._version} + set {_uniqueStorage()._version = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._title) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularStringField(value: &_storage._termsOfService) + case 4: try decoder.decodeSingularMessageField(value: &_storage._contact) + case 5: try decoder.decodeSingularMessageField(value: &_storage._license) + case 6: try decoder.decodeSingularStringField(value: &_storage._version) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if !_storage._termsOfService.isEmpty { + try visitor.visitSingularStringField(value: _storage._termsOfService, fieldNumber: 3) + } + if let v = _storage._contact { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._license { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._version.isEmpty { + try visitor.visitSingularStringField(value: _storage._version, fieldNumber: 6) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ItemsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ItemsItem" + + public var schemaOrReference: [Openapi_V3_SchemaOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.schemaOrReference) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
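// Usage sketch (illustrative, not part of the generated file): sub-message fields such
// as `contact` and `license` gain hasX/clearX presence helpers, unlike proto3 scalar
// fields, and assigning through a nested accessor is enough to mark them as set.
func infoDemo() {
  var info = Openapi_V3_Info()
  info.title = "Pet Store"
  info.version = "1.0.0"
  print(info.hasContact)                    // false until assigned
  var contact = Openapi_V3_Contact()
  contact.name = "API Support"
  contact.email = "support@example.com"
  info.contact = contact
  info.license.name = "Apache 2.0"          // nested write also establishes presence
  print(info.hasContact, info.hasLicense)   // true true
  info.clearLicense()                       // back to unset
}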
+ public func traverse(visitor: inout V) throws { + if !self.schemaOrReference.isEmpty { + try visitor.visitRepeatedMessageField(value: self.schemaOrReference, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// License information for the exposed API. +public struct Openapi_V3_License: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".License" + + public var name: String = String() + + public var url: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation. +public struct Openapi_V3_Link: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Link" + + public var operationRef: String { + get {return _storage._operationRef} + set {_uniqueStorage()._operationRef = newValue} + } + + public var operationID: String { + get {return _storage._operationID} + set {_uniqueStorage()._operationID = newValue} + } + + public var parameters: Openapi_V3_AnysOrExpressions { + get {return _storage._parameters ?? Openapi_V3_AnysOrExpressions()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. 
+ public mutating func clearParameters() {_storage._parameters = nil} + + public var requestBody: Openapi_V3_AnyOrExpression { + get {return _storage._requestBody ?? Openapi_V3_AnyOrExpression()} + set {_uniqueStorage()._requestBody = newValue} + } + /// Returns true if `requestBody` has been explicitly set. + public var hasRequestBody: Bool {return _storage._requestBody != nil} + /// Clears the value of `requestBody`. Subsequent reads from it will return its default value. + public mutating func clearRequestBody() {_storage._requestBody = nil} + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var server: Openapi_V3_Server { + get {return _storage._server ?? Openapi_V3_Server()} + set {_uniqueStorage()._server = newValue} + } + /// Returns true if `server` has been explicitly set. + public var hasServer: Bool {return _storage._server != nil} + /// Clears the value of `server`. Subsequent reads from it will return its default value. + public mutating func clearServer() {_storage._server = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._operationRef) + case 2: try decoder.decodeSingularStringField(value: &_storage._operationID) + case 3: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 4: try decoder.decodeSingularMessageField(value: &_storage._requestBody) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeSingularMessageField(value: &_storage._server) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
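// Usage sketch (illustrative, not part of the generated file): a Link can point at its
// target operation by `operationRef` or by `operationID` (note the capitalised ID from
// Swift naming conventions); runtime-expression parameters go in the optional
// `parameters` sub-message declared above.
func linkDemo() {
  var link = Openapi_V3_Link()
  link.operationID = "getUserAddress"
  link.description_p = "The created user's `id` is passed to `getUserAddress`."
  print(link.hasParameters)   // false: no runtime expressions attached yet
}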
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._operationRef.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationRef, fieldNumber: 1) + } + if !_storage._operationID.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationID, fieldNumber: 2) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._requestBody { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if let v = _storage._server { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_LinkOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".LinkOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var link: Openapi_V3_Link { + get { + if case .link(let v)? = _storage._oneof {return v} + return Openapi_V3_Link() + } + set {_uniqueStorage()._oneof = .link(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case link(Openapi_V3_Link) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_LinkOrReference.OneOf_Oneof, rhs: Openapi_V3_LinkOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.link(let l), .link(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Link? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .link(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .link(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .link(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_LinksOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".LinksOrReferences" + + public var additionalProperties: [Openapi_V3_NamedLinkOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Each Media Type Object provides schema and examples for the media type identified by its key. +public struct Openapi_V3_MediaType: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".MediaType" + + public var schema: Openapi_V3_SchemaOrReference { + get {return _storage._schema ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. 
+ public mutating func clearExamples() {_storage._examples = nil} + + public var encoding: Openapi_V3_Encodings { + get {return _storage._encoding ?? Openapi_V3_Encodings()} + set {_uniqueStorage()._encoding = newValue} + } + /// Returns true if `encoding` has been explicitly set. + public var hasEncoding: Bool {return _storage._encoding != nil} + /// Clears the value of `encoding`. Subsequent reads from it will return its default value. + public mutating func clearEncoding() {_storage._encoding = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 2: try decoder.decodeSingularMessageField(value: &_storage._example) + case 3: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 4: try decoder.decodeSingularMessageField(value: &_storage._encoding) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._encoding { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_MediaTypes: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".MediaTypes" + + public var additionalProperties: [Openapi_V3_NamedMediaType] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +public struct Openapi_V3_NamedAny: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedAny" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_Any { + get {return _storage._value ?? Openapi_V3_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of AnyOrExpression as ordered (name,value) pairs. 
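The `Openapi_V3_NamedAny` pair above illustrates the pattern shared by every Named* wrapper in this file: a string key plus an optional message value with has/clear accessors. A minimal usage sketch, assuming only the accessors shown above and SwiftProtobuf's standard binary round-trip (the extension key is hypothetical, and the `Openapi_V3_Any` payload is left at its default value):

    import Foundation
    import SwiftProtobuf

    // The Named* messages model proto maps as ordered (name, value) pairs.
    // Here a specification-extension entry is built with Openapi_V3_NamedAny.
    var entry = Openapi_V3_NamedAny()
    entry.name = "x-internal-id"        // map key (hypothetical)
    entry.value = Openapi_V3_Any()      // map value; `hasValue` is now true

    do {
        // Binary round-trip via the SwiftProtobuf Message conformance declared above.
        let data = try entry.serializedData()
        let copy = try Openapi_V3_NamedAny(serializedData: data)
        assert(copy.name == entry.name && copy.hasValue)
    } catch {
        print("round-trip failed: \(error)")
    }

The same sketch applies to the other Named* wrappers by substituting the value type.
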
+public struct Openapi_V3_NamedAnyOrExpression: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedAnyOrExpression" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_AnyOrExpression { + get {return _storage._value ?? Openapi_V3_AnyOrExpression()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of CallbackOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedCallbackOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedCallbackOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_CallbackOrReference { + get {return _storage._value ?? Openapi_V3_CallbackOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Encoding as ordered (name,value) pairs. +public struct Openapi_V3_NamedEncoding: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedEncoding" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_Encoding { + get {return _storage._value ?? Openapi_V3_Encoding()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ExampleOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedExampleOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedExampleOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ExampleOrReference { + get {return _storage._value ?? Openapi_V3_ExampleOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of HeaderOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedHeaderOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedHeaderOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_HeaderOrReference { + get {return _storage._value ?? Openapi_V3_HeaderOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. 
+ public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of LinkOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedLinkOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedLinkOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_LinkOrReference { + get {return _storage._value ?? Openapi_V3_LinkOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of MediaType as ordered (name,value) pairs. +public struct Openapi_V3_NamedMediaType: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedMediaType" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_MediaType { + get {return _storage._value ?? Openapi_V3_MediaType()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ParameterOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedParameterOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedParameterOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ParameterOrReference { + get {return _storage._value ?? 
Openapi_V3_ParameterOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +public struct Openapi_V3_NamedPathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedPathItem" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_PathItem { + get {return _storage._value ?? Openapi_V3_PathItem()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of RequestBodyOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedRequestBodyOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedRequestBodyOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_RequestBodyOrReference { + get {return _storage._value ?? Openapi_V3_RequestBodyOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ResponseOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedResponseOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResponseOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ResponseOrReference { + get {return _storage._value ?? Openapi_V3_ResponseOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of SchemaOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedSchemaOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSchemaOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_SchemaOrReference { + get {return _storage._value ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. 
+ public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of SecuritySchemeOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedSecuritySchemeOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSecuritySchemeOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_SecuritySchemeOrReference { + get {return _storage._value ?? Openapi_V3_SecuritySchemeOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ServerVariable as ordered (name,value) pairs. +public struct Openapi_V3_NamedServerVariable: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedServerVariable" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ServerVariable { + get {return _storage._value ?? Openapi_V3_ServerVariable()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +public struct Openapi_V3_NamedString: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedString" + + /// Map key + public var name: String = String() + + /// Mapped value + public var value: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.value.isEmpty { + try visitor.visitSingularStringField(value: self.value, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Configuration details for a supported OAuth Flow +public struct Openapi_V3_OauthFlow: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".OauthFlow" + + public var authorizationURL: String { + get {return _storage._authorizationURL} + set {_uniqueStorage()._authorizationURL = newValue} + } + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var refreshURL: String { + get {return _storage._refreshURL} + set {_uniqueStorage()._refreshURL = newValue} + } + + public var scopes: Openapi_V3_Strings { + get {return _storage._scopes ?? Openapi_V3_Strings()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._authorizationURL) + case 2: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 3: try decoder.decodeSingularStringField(value: &_storage._refreshURL) + case 4: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._authorizationURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._authorizationURL, fieldNumber: 1) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 2) + } + if !_storage._refreshURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._refreshURL, fieldNumber: 3) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Allows configuration of the supported OAuth Flows. +public struct Openapi_V3_OauthFlows: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".OauthFlows" + + public var implicit: Openapi_V3_OauthFlow { + get {return _storage._implicit ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._implicit = newValue} + } + /// Returns true if `implicit` has been explicitly set. + public var hasImplicit: Bool {return _storage._implicit != nil} + /// Clears the value of `implicit`. Subsequent reads from it will return its default value. + public mutating func clearImplicit() {_storage._implicit = nil} + + public var password: Openapi_V3_OauthFlow { + get {return _storage._password ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._password = newValue} + } + /// Returns true if `password` has been explicitly set. + public var hasPassword: Bool {return _storage._password != nil} + /// Clears the value of `password`. Subsequent reads from it will return its default value. + public mutating func clearPassword() {_storage._password = nil} + + public var clientCredentials: Openapi_V3_OauthFlow { + get {return _storage._clientCredentials ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._clientCredentials = newValue} + } + /// Returns true if `clientCredentials` has been explicitly set. + public var hasClientCredentials: Bool {return _storage._clientCredentials != nil} + /// Clears the value of `clientCredentials`. Subsequent reads from it will return its default value. + public mutating func clearClientCredentials() {_storage._clientCredentials = nil} + + public var authorizationCode: Openapi_V3_OauthFlow { + get {return _storage._authorizationCode ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._authorizationCode = newValue} + } + /// Returns true if `authorizationCode` has been explicitly set. + public var hasAuthorizationCode: Bool {return _storage._authorizationCode != nil} + /// Clears the value of `authorizationCode`. Subsequent reads from it will return its default value. + public mutating func clearAuthorizationCode() {_storage._authorizationCode = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._implicit) + case 2: try decoder.decodeSingularMessageField(value: &_storage._password) + case 3: try decoder.decodeSingularMessageField(value: &_storage._clientCredentials) + case 4: try decoder.decodeSingularMessageField(value: &_storage._authorizationCode) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._implicit { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._password { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._clientCredentials { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._authorizationCode { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Object: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Object" + + public var additionalProperties: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Describes a single API operation on a path. 
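A minimal usage sketch for the `Openapi_V3_Operation` message declared next, assuming only the generated accessors it exposes (`operationID`, `summary`, `description_p`, `tags`) and SwiftProtobuf's binary round-trip; the concrete values are illustrative only:

    import Foundation
    import SwiftProtobuf

    // Sketch: populate an Operation and round-trip it through the binary encoding.
    func makeOperation() throws -> Openapi_V3_Operation {
        var op = Openapi_V3_Operation()
        op.operationID = "getPetById"               // hypothetical operation id
        op.summary = "Find pet by ID"
        op.description_p = "Returns a single pet"   // `description` is renamed to description_p by the generator
        op.tags = ["pets"]

        let data = try op.serializedData()          // binary protobuf encoding
        return try Openapi_V3_Operation(serializedData: data)
    }

    do {
        let op = try makeOperation()
        print(op.operationID)   // "getPetById"
    } catch {
        print("encoding failed: \(error)")
    }
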
+public struct Openapi_V3_Operation: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Operation" + + public var tags: [String] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var operationID: String { + get {return _storage._operationID} + set {_uniqueStorage()._operationID = newValue} + } + + public var parameters: [Openapi_V3_ParameterOrReference] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var requestBody: Openapi_V3_RequestBodyOrReference { + get {return _storage._requestBody ?? Openapi_V3_RequestBodyOrReference()} + set {_uniqueStorage()._requestBody = newValue} + } + /// Returns true if `requestBody` has been explicitly set. + public var hasRequestBody: Bool {return _storage._requestBody != nil} + /// Clears the value of `requestBody`. Subsequent reads from it will return its default value. + public mutating func clearRequestBody() {_storage._requestBody = nil} + + public var responses: Openapi_V3_Responses { + get {return _storage._responses ?? Openapi_V3_Responses()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + public var callbacks: Openapi_V3_CallbacksOrReferences { + get {return _storage._callbacks ?? Openapi_V3_CallbacksOrReferences()} + set {_uniqueStorage()._callbacks = newValue} + } + /// Returns true if `callbacks` has been explicitly set. + public var hasCallbacks: Bool {return _storage._callbacks != nil} + /// Clears the value of `callbacks`. Subsequent reads from it will return its default value. + public mutating func clearCallbacks() {_storage._callbacks = nil} + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var security: [Openapi_V3_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var servers: [Openapi_V3_Server] { + get {return _storage._servers} + set {_uniqueStorage()._servers = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &_storage._tags) + case 2: try decoder.decodeSingularStringField(value: &_storage._summary) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 5: try decoder.decodeSingularStringField(value: &_storage._operationID) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 7: try decoder.decodeSingularMessageField(value: &_storage._requestBody) + case 8: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 9: try decoder.decodeSingularMessageField(value: &_storage._callbacks) + case 10: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 11: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._servers) + case 13: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._tags.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._tags, fieldNumber: 1) + } + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._operationID.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationID, fieldNumber: 5) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 6) + } + if let v = _storage._requestBody { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._callbacks { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 10) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 11) + } + if !_storage._servers.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._servers, fieldNumber: 12) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 13) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = 
_StorageClass.defaultInstance +} + +/// Describes a single operation parameter. A unique parameter is defined by a combination of a name and location. +public struct Openapi_V3_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var style: String { + get {return _storage._style} + set {_uniqueStorage()._style = newValue} + } + + public var explode: Bool { + get {return _storage._explode} + set {_uniqueStorage()._explode = newValue} + } + + public var allowReserved: Bool { + get {return _storage._allowReserved} + set {_uniqueStorage()._allowReserved = newValue} + } + + public var schema: Openapi_V3_SchemaOrReference { + get {return _storage._schema ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularBoolField(value: &_storage._required) + case 5: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 6: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 7: try decoder.decodeSingularStringField(value: &_storage._style) + case 8: try decoder.decodeSingularBoolField(value: &_storage._explode) + case 9: try decoder.decodeSingularBoolField(value: &_storage._allowReserved) + case 10: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 11: try decoder.decodeSingularMessageField(value: &_storage._example) + case 12: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 13: try decoder.decodeSingularMessageField(value: &_storage._content) + case 14: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 4) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 5) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 6) + } + if !_storage._style.isEmpty { + try visitor.visitSingularStringField(value: _storage._style, fieldNumber: 7) + } + if _storage._explode != false { + try visitor.visitSingularBoolField(value: _storage._explode, fieldNumber: 8) + } + if _storage._allowReserved != false { + try visitor.visitSingularBoolField(value: _storage._allowReserved, fieldNumber: 9) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 12) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 13) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 14) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ParameterOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParameterOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var parameter: Openapi_V3_Parameter { + get { + if case .parameter(let v)? = _storage._oneof {return v} + return Openapi_V3_Parameter() + } + set {_uniqueStorage()._oneof = .parameter(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case parameter(Openapi_V3_Parameter) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_ParameterOrReference.OneOf_Oneof, rhs: Openapi_V3_ParameterOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.parameter(let l), .parameter(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Parameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .parameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .parameter(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .parameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ParametersOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParametersOrReferences" + + public var additionalProperties: [Openapi_V3_NamedParameterOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available. 
+public struct Openapi_V3_PathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PathItem" + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var get: Openapi_V3_Operation { + get {return _storage._get ?? Openapi_V3_Operation()} + set {_uniqueStorage()._get = newValue} + } + /// Returns true if `get` has been explicitly set. + public var hasGet: Bool {return _storage._get != nil} + /// Clears the value of `get`. Subsequent reads from it will return its default value. + public mutating func clearGet() {_storage._get = nil} + + public var put: Openapi_V3_Operation { + get {return _storage._put ?? Openapi_V3_Operation()} + set {_uniqueStorage()._put = newValue} + } + /// Returns true if `put` has been explicitly set. + public var hasPut: Bool {return _storage._put != nil} + /// Clears the value of `put`. Subsequent reads from it will return its default value. + public mutating func clearPut() {_storage._put = nil} + + public var post: Openapi_V3_Operation { + get {return _storage._post ?? Openapi_V3_Operation()} + set {_uniqueStorage()._post = newValue} + } + /// Returns true if `post` has been explicitly set. + public var hasPost: Bool {return _storage._post != nil} + /// Clears the value of `post`. Subsequent reads from it will return its default value. + public mutating func clearPost() {_storage._post = nil} + + public var delete: Openapi_V3_Operation { + get {return _storage._delete ?? Openapi_V3_Operation()} + set {_uniqueStorage()._delete = newValue} + } + /// Returns true if `delete` has been explicitly set. + public var hasDelete: Bool {return _storage._delete != nil} + /// Clears the value of `delete`. Subsequent reads from it will return its default value. + public mutating func clearDelete() {_storage._delete = nil} + + public var options: Openapi_V3_Operation { + get {return _storage._options ?? Openapi_V3_Operation()} + set {_uniqueStorage()._options = newValue} + } + /// Returns true if `options` has been explicitly set. + public var hasOptions: Bool {return _storage._options != nil} + /// Clears the value of `options`. Subsequent reads from it will return its default value. + public mutating func clearOptions() {_storage._options = nil} + + public var head: Openapi_V3_Operation { + get {return _storage._head ?? Openapi_V3_Operation()} + set {_uniqueStorage()._head = newValue} + } + /// Returns true if `head` has been explicitly set. + public var hasHead: Bool {return _storage._head != nil} + /// Clears the value of `head`. Subsequent reads from it will return its default value. + public mutating func clearHead() {_storage._head = nil} + + public var patch: Openapi_V3_Operation { + get {return _storage._patch ?? Openapi_V3_Operation()} + set {_uniqueStorage()._patch = newValue} + } + /// Returns true if `patch` has been explicitly set. + public var hasPatch: Bool {return _storage._patch != nil} + /// Clears the value of `patch`. Subsequent reads from it will return its default value. + public mutating func clearPatch() {_storage._patch = nil} + + public var trace: Openapi_V3_Operation { + get {return _storage._trace ?? 
Openapi_V3_Operation()} + set {_uniqueStorage()._trace = newValue} + } + /// Returns true if `trace` has been explicitly set. + public var hasTrace: Bool {return _storage._trace != nil} + /// Clears the value of `trace`. Subsequent reads from it will return its default value. + public mutating func clearTrace() {_storage._trace = nil} + + public var servers: [Openapi_V3_Server] { + get {return _storage._servers} + set {_uniqueStorage()._servers = newValue} + } + + public var parameters: [Openapi_V3_ParameterOrReference] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._ref) + case 2: try decoder.decodeSingularStringField(value: &_storage._summary) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._get) + case 5: try decoder.decodeSingularMessageField(value: &_storage._put) + case 6: try decoder.decodeSingularMessageField(value: &_storage._post) + case 7: try decoder.decodeSingularMessageField(value: &_storage._delete) + case 8: try decoder.decodeSingularMessageField(value: &_storage._options) + case 9: try decoder.decodeSingularMessageField(value: &_storage._head) + case 10: try decoder.decodeSingularMessageField(value: &_storage._patch) + case 11: try decoder.decodeSingularMessageField(value: &_storage._trace) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._servers) + case 13: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 14: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 1) + } + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._get { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._put { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._post { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._delete { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._options { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._head { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if let v = _storage._patch { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._trace { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if !_storage._servers.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._servers, fieldNumber: 12) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 13) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 14) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints. +public struct Openapi_V3_Paths: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Paths" + + public var path: [Openapi_V3_NamedPathItem] = [] + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.path) + case 2: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.path.isEmpty { + try visitor.visitRepeatedMessageField(value: self.path, fieldNumber: 1) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Properties: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Properties" + + public var additionalProperties: [Openapi_V3_NamedSchemaOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification. +public struct Openapi_V3_Reference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Reference" + + public var ref: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_RequestBodiesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".RequestBodiesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedRequestBodyOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Describes a single request body. +public struct Openapi_V3_RequestBody: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".RequestBody" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularMessageField(value: &_storage._content) + case 3: try decoder.decodeSingularBoolField(value: &_storage._required) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 3) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_RequestBodyOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".RequestBodyOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var requestBody: Openapi_V3_RequestBody { + get { + if case .requestBody(let v)? = _storage._oneof {return v} + return Openapi_V3_RequestBody() + } + set {_uniqueStorage()._oneof = .requestBody(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case requestBody(Openapi_V3_RequestBody) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_RequestBodyOrReference.OneOf_Oneof, rhs: Openapi_V3_RequestBodyOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.requestBody(let l), .requestBody(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_RequestBody? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .requestBody(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .requestBody(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .requestBody(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Describes a single response from an API Operation, including design-time, static `links` to operations based on the response. +public struct Openapi_V3_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var headers: Openapi_V3_HeadersOrReferences { + get {return _storage._headers ?? Openapi_V3_HeadersOrReferences()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. + public mutating func clearHeaders() {_storage._headers = nil} + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var links: Openapi_V3_LinksOrReferences { + get {return _storage._links ?? Openapi_V3_LinksOrReferences()} + set {_uniqueStorage()._links = newValue} + } + /// Returns true if `links` has been explicitly set. + public var hasLinks: Bool {return _storage._links != nil} + /// Clears the value of `links`. Subsequent reads from it will return its default value. + public mutating func clearLinks() {_storage._links = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 3: try decoder.decodeSingularMessageField(value: &_storage._content) + case 4: try decoder.decodeSingularMessageField(value: &_storage._links) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._links { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ResponseOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponseOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var response: Openapi_V3_Response { + get { + if case .response(let v)? = _storage._oneof {return v} + return Openapi_V3_Response() + } + set {_uniqueStorage()._oneof = .response(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case response(Openapi_V3_Response) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_ResponseOrReference.OneOf_Oneof, rhs: Openapi_V3_ResponseOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.response(let l), .response(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Response? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .response(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .response(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .response(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call. +public struct Openapi_V3_Responses: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Responses" + + public var `default`: Openapi_V3_ResponseOrReference { + get {return _storage._default ?? Openapi_V3_ResponseOrReference()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var responseOrReference: [Openapi_V3_NamedResponseOrReference] { + get {return _storage._responseOrReference} + set {_uniqueStorage()._responseOrReference = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._default) + case 2: try decoder.decodeRepeatedMessageField(value: &_storage._responseOrReference) + case 3: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._responseOrReference.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._responseOrReference, fieldNumber: 2) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 3) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ResponsesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponsesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedResponseOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. Unless stated otherwise, the property definitions follow the JSON Schema. 
+public struct Openapi_V3_Schema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schema" + + public var nullable: Bool { + get {return _storage._nullable} + set {_uniqueStorage()._nullable = newValue} + } + + public var discriminator: Openapi_V3_Discriminator { + get {return _storage._discriminator ?? Openapi_V3_Discriminator()} + set {_uniqueStorage()._discriminator = newValue} + } + /// Returns true if `discriminator` has been explicitly set. + public var hasDiscriminator: Bool {return _storage._discriminator != nil} + /// Clears the value of `discriminator`. Subsequent reads from it will return its default value. + public mutating func clearDiscriminator() {_storage._discriminator = nil} + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var writeOnly: Bool { + get {return _storage._writeOnly} + set {_uniqueStorage()._writeOnly = newValue} + } + + public var xml: Openapi_V3_Xml { + get {return _storage._xml ?? Openapi_V3_Xml()} + set {_uniqueStorage()._xml = newValue} + } + /// Returns true if `xml` has been explicitly set. + public var hasXml: Bool {return _storage._xml != nil} + /// Clears the value of `xml`. Subsequent reads from it will return its default value. + public mutating func clearXml() {_storage._xml = nil} + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. 
+ public mutating func clearExample() {_storage._example = nil} + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var maxProperties: Int64 { + get {return _storage._maxProperties} + set {_uniqueStorage()._maxProperties = newValue} + } + + public var minProperties: Int64 { + get {return _storage._minProperties} + set {_uniqueStorage()._minProperties = newValue} + } + + public var required: [String] { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var `enum`: [Openapi_V3_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var allOf: [Openapi_V3_SchemaOrReference] { + get {return _storage._allOf} + set {_uniqueStorage()._allOf = newValue} + } + + public var oneOf: [Openapi_V3_SchemaOrReference] { + get {return _storage._oneOf} + set {_uniqueStorage()._oneOf = newValue} + } + + public var anyOf: [Openapi_V3_SchemaOrReference] { + get {return _storage._anyOf} + set {_uniqueStorage()._anyOf = newValue} + } + + public var not: Openapi_V3_Schema { + get {return _storage._not ?? Openapi_V3_Schema()} + set {_uniqueStorage()._not = newValue} + } + /// Returns true if `not` has been explicitly set. + public var hasNot: Bool {return _storage._not != nil} + /// Clears the value of `not`. Subsequent reads from it will return its default value. + public mutating func clearNot() {_storage._not = nil} + + public var items: Openapi_V3_ItemsItem { + get {return _storage._items ?? Openapi_V3_ItemsItem()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var properties: Openapi_V3_Properties { + get {return _storage._properties ?? 
Openapi_V3_Properties()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var additionalProperties: Openapi_V3_AdditionalPropertiesItem { + get {return _storage._additionalProperties ?? Openapi_V3_AdditionalPropertiesItem()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. + public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var `default`: Openapi_V3_DefaultType { + get {return _storage._default ?? Openapi_V3_DefaultType()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._nullable) + case 2: try decoder.decodeSingularMessageField(value: &_storage._discriminator) + case 3: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + case 4: try decoder.decodeSingularBoolField(value: &_storage._writeOnly) + case 5: try decoder.decodeSingularMessageField(value: &_storage._xml) + case 6: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 7: try decoder.decodeSingularMessageField(value: &_storage._example) + case 8: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 9: try decoder.decodeSingularStringField(value: &_storage._title) + case 10: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 11: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 12: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 13: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 14: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 16: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 17: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 19: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 20: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 21: try decoder.decodeSingularInt64Field(value: &_storage._maxProperties) + case 22: try decoder.decodeSingularInt64Field(value: &_storage._minProperties) + case 23: try decoder.decodeRepeatedStringField(value: &_storage._required) + case 24: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 25: try decoder.decodeSingularStringField(value: &_storage._type) + case 26: try decoder.decodeRepeatedMessageField(value: &_storage._allOf) + case 27: try decoder.decodeRepeatedMessageField(value: &_storage._oneOf) + case 28: try decoder.decodeRepeatedMessageField(value: &_storage._anyOf) + case 29: try decoder.decodeSingularMessageField(value: &_storage._not) + case 30: try decoder.decodeSingularMessageField(value: &_storage._items) + case 31: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 32: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 33: try decoder.decodeSingularMessageField(value: &_storage._default) + case 34: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 35: try decoder.decodeSingularStringField(value: &_storage._format) + case 36: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._nullable != false { + try visitor.visitSingularBoolField(value: _storage._nullable, fieldNumber: 1) + } + if let v = _storage._discriminator { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 3) + } + if _storage._writeOnly != false { + try visitor.visitSingularBoolField(value: _storage._writeOnly, fieldNumber: 4) + } + if let v = _storage._xml { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 8) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 9) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 10) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 11) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 12) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 13) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 14) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 15) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 16) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 17) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 18) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 19) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 20) + } + if _storage._maxProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxProperties, fieldNumber: 21) + } + if _storage._minProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._minProperties, fieldNumber: 22) + } + if !_storage._required.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._required, fieldNumber: 23) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 24) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 25) + } + if !_storage._allOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._allOf, fieldNumber: 26) + } + if !_storage._oneOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._oneOf, fieldNumber: 27) + } + if !_storage._anyOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._anyOf, fieldNumber: 28) + } + if let v = _storage._not { + try visitor.visitSingularMessageField(value: v, fieldNumber: 29) + } + if let v = 
_storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 30) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 31) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 32) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 33) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 34) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 35) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 36) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SchemaOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SchemaOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schema: Openapi_V3_Schema { + get { + if case .schema(let v)? = _storage._oneof {return v} + return Openapi_V3_Schema() + } + set {_uniqueStorage()._oneof = .schema(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schema(Openapi_V3_Schema) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_SchemaOrReference.OneOf_Oneof, rhs: Openapi_V3_SchemaOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schema(let l), .schema(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Schema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schema(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      switch _storage._oneof {
+      case .schema(let v)?:
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+      case .reference(let v)?:
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
+      case nil: break
+      }
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  fileprivate var _storage = _StorageClass.defaultInstance
+}
+
+public struct Openapi_V3_SchemasOrReferences: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".SchemasOrReferences"
+
+  public var additionalProperties: [Openapi_V3_NamedSchemaOrReference] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+/// Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. When a list of Security Requirement Objects is defined on the Open API object or Operation Object, only one of Security Requirement Objects in the list needs to be satisfied to authorize the request.
+public struct Openapi_V3_SecurityRequirement: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".SecurityRequirement"
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let _ = try decoder.nextFieldNumber() {
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
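The `*OrReference` wrapper messages in this hunk expose their underlying `oneof` through convenience accessors. A minimal usage sketch, not part of the generated diff, assuming the `Openapi_V3_Schema` and `Openapi_V3_Reference` types generated earlier in this file:

    // Hedged example: how the oneof-backed accessors on SchemaOrReference behave.
    var item = Openapi_V3_SchemaOrReference()
    var schema = Openapi_V3_Schema()
    schema.title = "Pet"
    item.schema = schema                      // oneof is now .schema(...)
    // Reading the inactive case yields a default instance instead of trapping:
    let unsetReference = item.reference       // == Openapi_V3_Reference()
    // Assigning the other case replaces whatever was stored before:
    item.reference = Openapi_V3_Reference()
    if case .reference(let r)? = item.oneof {
      _ = (r, unsetReference)                 // .reference is the active case here
    }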
+ public func traverse(visitor: inout V) throws { + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect Discovery. +public struct Openapi_V3_SecurityScheme: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityScheme" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + public var scheme: String { + get {return _storage._scheme} + set {_uniqueStorage()._scheme = newValue} + } + + public var bearerFormat: String { + get {return _storage._bearerFormat} + set {_uniqueStorage()._bearerFormat = newValue} + } + + public var flows: Openapi_V3_OauthFlows { + get {return _storage._flows ?? Openapi_V3_OauthFlows()} + set {_uniqueStorage()._flows = newValue} + } + /// Returns true if `flows` has been explicitly set. + public var hasFlows: Bool {return _storage._flows != nil} + /// Clears the value of `flows`. Subsequent reads from it will return its default value. + public mutating func clearFlows() {_storage._flows = nil} + + public var openIDConnectURL: String { + get {return _storage._openIDConnectURL} + set {_uniqueStorage()._openIDConnectURL = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularStringField(value: &_storage._name) + case 4: try decoder.decodeSingularStringField(value: &_storage._in) + case 5: try decoder.decodeSingularStringField(value: &_storage._scheme) + case 6: try decoder.decodeSingularStringField(value: &_storage._bearerFormat) + case 7: try decoder.decodeSingularMessageField(value: &_storage._flows) + case 8: try decoder.decodeSingularStringField(value: &_storage._openIDConnectURL) + case 9: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 3) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 4) + } + if !_storage._scheme.isEmpty { + try visitor.visitSingularStringField(value: _storage._scheme, fieldNumber: 5) + } + if !_storage._bearerFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._bearerFormat, fieldNumber: 6) + } + if let v = _storage._flows { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if !_storage._openIDConnectURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._openIDConnectURL, fieldNumber: 8) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 9) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SecuritySchemeOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecuritySchemeOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var securityScheme: Openapi_V3_SecurityScheme { + get { + if case .securityScheme(let v)? = _storage._oneof {return v} + return Openapi_V3_SecurityScheme() + } + set {_uniqueStorage()._oneof = .securityScheme(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case securityScheme(Openapi_V3_SecurityScheme) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_SecuritySchemeOrReference.OneOf_Oneof, rhs: Openapi_V3_SecuritySchemeOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.securityScheme(let l), .securityScheme(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_SecurityScheme? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .securityScheme(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .securityScheme(v)} + case 2: + var v: Openapi_V3_Reference? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .securityScheme(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SecuritySchemesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecuritySchemesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedSecuritySchemeOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// An object representing a Server. +public struct Openapi_V3_Server: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Server" + + public var url: String { + get {return _storage._url} + set {_uniqueStorage()._url = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var variables: Openapi_V3_ServerVariables { + get {return _storage._variables ?? Openapi_V3_ServerVariables()} + set {_uniqueStorage()._variables = newValue} + } + /// Returns true if `variables` has been explicitly set. + public var hasVariables: Bool {return _storage._variables != nil} + /// Clears the value of `variables`. Subsequent reads from it will return its default value. 
+ public mutating func clearVariables() {_storage._variables = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._url) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._variables) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._url.isEmpty { + try visitor.visitSingularStringField(value: _storage._url, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._variables { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// An object representing a Server Variable for server URL template substitution. +public struct Openapi_V3_ServerVariable: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ServerVariable" + + public var `enum`: [String] = [] + + public var `default`: String = String() + + public var description_p: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
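As the generated comments note, serialization entry points such as `serializedData()` and `init(serializedData:)` come from the SwiftProtobuf runtime rather than from this file. A small round-trip sketch under that assumption (illustrative only, not part of the diff; the URL is a placeholder):

    import Foundation
    import SwiftProtobuf

    var server = Openapi_V3_Server()
    server.url = "https://api.example.com/{port}"
    assert(!server.hasVariables)              // message-typed fields track explicit presence
    server.variables = Openapi_V3_ServerVariables()
    assert(server.hasVariables)

    // Binary round trip via the SwiftProtobuf Message extensions.
    let data = try server.serializedData()
    let decoded = try Openapi_V3_Server(serializedData: data)
    assert(decoded.url == server.url)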
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.`enum`) + case 2: try decoder.decodeSingularStringField(value: &self.`default`) + case 3: try decoder.decodeSingularStringField(value: &self.description_p) + case 4: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.`enum`.isEmpty { + try visitor.visitRepeatedStringField(value: self.`enum`, fieldNumber: 1) + } + if !self.`default`.isEmpty { + try visitor.visitSingularStringField(value: self.`default`, fieldNumber: 2) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 3) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_ServerVariables: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ServerVariables" + + public var additionalProperties: [Openapi_V3_NamedServerVariable] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Any property starting with x- is valid. +public struct Openapi_V3_SpecificationExtension: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SpecificationExtension" + + public var oneof: Openapi_V3_SpecificationExtension.OneOf_Oneof? = nil + + public var number: Double { + get { + if case .number(let v)? = oneof {return v} + return 0 + } + set {oneof = .number(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = oneof {return v} + return false + } + set {oneof = .boolean(newValue)} + } + + public var string: String { + get { + if case .string(let v)? 
= oneof {return v} + return String() + } + set {oneof = .string(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case number(Double) + case boolean(Bool) + case string(String) + + public static func ==(lhs: Openapi_V3_SpecificationExtension.OneOf_Oneof, rhs: Openapi_V3_SpecificationExtension.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.number(let l), .number(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + case (.string(let l), .string(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Double? + try decoder.decodeSingularDoubleField(value: &v) + if let v = v {self.oneof = .number(v)} + case 2: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {self.oneof = .boolean(v)} + case 3: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: String? + try decoder.decodeSingularStringField(value: &v) + if let v = v {self.oneof = .string(v)} + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + switch self.oneof { + case .number(let v)?: + try visitor.visitSingularDoubleField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case .string(let v)?: + try visitor.visitSingularStringField(value: v, fieldNumber: 3) + case nil: break + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_StringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".StringArray" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
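`Openapi_V3_SpecificationExtension`, defined a few lines above, models the value of an `x-...` property as a plain `oneof` with no heap-backed storage. A hedged sketch of how the generated accessors interact (illustrative only, not part of the diff):

    var ext = Openapi_V3_SpecificationExtension()
    ext.string = "internal"         // oneof = .string("internal")
    ext.boolean = true              // a later assignment wins; oneof = .boolean(true)
    switch ext.oneof {
    case .number(let n)?:  print("number:", n)
    case .boolean(let b)?: print("boolean:", b)
    case .string(let s)?:  print("string:", s)
    case nil:              print("unset")
    }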
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.value.isEmpty {
+      try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Openapi_V3_Strings: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Strings"
+
+  public var additionalProperties: [Openapi_V3_NamedString] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+/// Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances.
+public struct Openapi_V3_Tag: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Tag"
+
+  public var name: String {
+    get {return _storage._name}
+    set {_uniqueStorage()._name = newValue}
+  }
+
+  public var description_p: String {
+    get {return _storage._description_p}
+    set {_uniqueStorage()._description_p = newValue}
+  }
+
+  public var externalDocs: Openapi_V3_ExternalDocs {
+    get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()}
+    set {_uniqueStorage()._externalDocs = newValue}
+  }
+  /// Returns true if `externalDocs` has been explicitly set.
+  public var hasExternalDocs: Bool {return _storage._externalDocs != nil}
+  /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value.
+  public mutating func clearExternalDocs() {_storage._externalDocs = nil}
+
+  public var specificationExtension: [Openapi_V3_NamedAny] {
+    get {return _storage._specificationExtension}
+    set {_uniqueStorage()._specificationExtension = newValue}
+  }
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularStringField(value: &_storage._name)
+        case 2: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 3: try decoder.decodeSingularMessageField(value: &_storage._externalDocs)
+        case 4: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      if !_storage._name.isEmpty {
+        try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1)
+      }
+      if !_storage._description_p.isEmpty {
+        try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2)
+      }
+      if let v = _storage._externalDocs {
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
+      }
+      if !_storage._specificationExtension.isEmpty {
+        try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 4)
+      }
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  fileprivate var _storage = _StorageClass.defaultInstance
+}
+
+/// A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior.
+public struct Openapi_V3_Xml: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Xml"
+
+  public var name: String = String()
+
+  public var namespace: String = String()
+
+  public var prefix: String = String()
+
+  public var attribute: Bool = false
+
+  public var wrapped: Bool = false
+
+  public var specificationExtension: [Openapi_V3_NamedAny] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeSingularStringField(value: &self.name)
+      case 2: try decoder.decodeSingularStringField(value: &self.namespace)
+      case 3: try decoder.decodeSingularStringField(value: &self.prefix)
+      case 4: try decoder.decodeSingularBoolField(value: &self.attribute)
+      case 5: try decoder.decodeSingularBoolField(value: &self.wrapped)
+      case 6: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.namespace.isEmpty { + try visitor.visitSingularStringField(value: self.namespace, fieldNumber: 2) + } + if !self.prefix.isEmpty { + try visitor.visitSingularStringField(value: self.prefix, fieldNumber: 3) + } + if self.attribute != false { + try visitor.visitSingularBoolField(value: self.attribute, fieldNumber: 4) + } + if self.wrapped != false { + try visitor.visitSingularBoolField(value: self.wrapped, fieldNumber: 5) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 6) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. + +fileprivate let _protobuf_package = "openapi.v3" + +extension Openapi_V3_AdditionalPropertiesItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "schema_or_reference"), + 2: .same(proto: "boolean"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_AdditionalPropertiesItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_AdditionalPropertiesItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Any: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + 2: .same(proto: "yaml"), + ] + + fileprivate class _StorageClass { + var _value: SwiftProtobuf.Google_Protobuf_Any? 
= nil + var _yaml: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _value = source._value + _yaml = source._yaml + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Any) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._value != other_storage._value {return false} + if _storage._yaml != other_storage._yaml {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_AnyOrExpression: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "any"), + 2: .same(proto: "expression"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_AnyOrExpression.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_AnyOrExpression) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_AnysOrExpressions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_AnysOrExpressions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Callback: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "path"), + 2: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Callback) -> Bool { + if self.path != other.path {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_CallbackOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "callback"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_CallbackOrReference.OneOf_Oneof? 
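The `_StorageClass` / `isKnownUniquelyReferenced` / `_uniqueStorage()` boilerplate repeated throughout these extensions is a copy-on-write pattern: the generated structs keep value semantics while sharing a reference-typed backing store until a mutation occurs. A sketch of the observable behavior, illustrative only and assuming the `yaml` accessor generated for `Openapi_V3_Any` earlier in this file:

    var original = Openapi_V3_Any()
    original.yaml = "title: original"
    var copy = original                  // the backing _StorageClass is shared at this point
    copy.yaml = "title: copy"            // _uniqueStorage() clones the storage before mutating
    assert(original.yaml == "title: original")
    assert(copy.yaml == "title: copy")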
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_CallbackOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_CallbacksOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_CallbacksOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Components: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schemas"), + 2: .same(proto: "responses"), + 3: .same(proto: "parameters"), + 4: .same(proto: "examples"), + 5: .standard(proto: "request_bodies"), + 6: .same(proto: "headers"), + 7: .standard(proto: "security_schemes"), + 8: .same(proto: "links"), + 9: .same(proto: "callbacks"), + 10: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _schemas: Openapi_V3_SchemasOrReferences? = nil + var _responses: Openapi_V3_ResponsesOrReferences? = nil + var _parameters: Openapi_V3_ParametersOrReferences? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _requestBodies: Openapi_V3_RequestBodiesOrReferences? = nil + var _headers: Openapi_V3_HeadersOrReferences? = nil + var _securitySchemes: Openapi_V3_SecuritySchemesOrReferences? = nil + var _links: Openapi_V3_LinksOrReferences? = nil + var _callbacks: Openapi_V3_CallbacksOrReferences? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _schemas = source._schemas + _responses = source._responses + _parameters = source._parameters + _examples = source._examples + _requestBodies = source._requestBodies + _headers = source._headers + _securitySchemes = source._securitySchemes + _links = source._links + _callbacks = source._callbacks + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Components) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._schemas != other_storage._schemas {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._requestBodies != other_storage._requestBodies {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._securitySchemes != other_storage._securitySchemes {return false} + if _storage._links != other_storage._links {return false} + if _storage._callbacks != other_storage._callbacks {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Contact: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .same(proto: "email"), + 4: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Contact) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.email != other.email {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_DefaultType: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "number"), + 2: .same(proto: "boolean"), + 3: .same(proto: "string"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_DefaultType) -> Bool { + if self.oneof != other.oneof {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Discriminator: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "property_name"), + 2: .same(proto: "mapping"), + ] + + fileprivate class _StorageClass { + var _propertyName: String = String() + var _mapping: Openapi_V3_Strings? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _propertyName = source._propertyName + _mapping = source._mapping + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Discriminator) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._propertyName != other_storage._propertyName {return false} + if _storage._mapping != other_storage._mapping {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Document: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "openapi"), + 2: .same(proto: "info"), + 3: .same(proto: "servers"), + 4: .same(proto: "paths"), + 5: .same(proto: "components"), + 6: .same(proto: "security"), + 7: .same(proto: "tags"), + 8: .standard(proto: "external_docs"), + 9: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _openapi: String = String() + var _info: Openapi_V3_Info? = nil + var _servers: [Openapi_V3_Server] = [] + var _paths: Openapi_V3_Paths? = nil + var _components: Openapi_V3_Components? = nil + var _security: [Openapi_V3_SecurityRequirement] = [] + var _tags: [Openapi_V3_Tag] = [] + var _externalDocs: Openapi_V3_ExternalDocs? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _openapi = source._openapi + _info = source._info + _servers = source._servers + _paths = source._paths + _components = source._components + _security = source._security + _tags = source._tags + _externalDocs = source._externalDocs + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Document) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._openapi != other_storage._openapi {return false} + if _storage._info != other_storage._info {return false} + if _storage._servers != other_storage._servers {return false} + if _storage._paths != other_storage._paths {return false} + if _storage._components != other_storage._components {return false} + if _storage._security != other_storage._security {return false} + if _storage._tags != other_storage._tags {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Encoding: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 
1: .standard(proto: "content_type"), + 2: .same(proto: "headers"), + 3: .same(proto: "style"), + 4: .same(proto: "explode"), + 5: .standard(proto: "allow_reserved"), + 6: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _contentType: String = String() + var _headers: Openapi_V3_HeadersOrReferences? = nil + var _style: String = String() + var _explode: Bool = false + var _allowReserved: Bool = false + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _contentType = source._contentType + _headers = source._headers + _style = source._style + _explode = source._explode + _allowReserved = source._allowReserved + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Encoding) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._contentType != other_storage._contentType {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._style != other_storage._style {return false} + if _storage._explode != other_storage._explode {return false} + if _storage._allowReserved != other_storage._allowReserved {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Encodings: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Encodings) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Example: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "summary"), + 2: .same(proto: "description"), + 3: .same(proto: "value"), + 4: .standard(proto: "external_value"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _summary: String = String() + var _description_p: String = String() + var _value: Openapi_V3_Any? 
= nil
+    var _externalValue: String = String()
+    var _specificationExtension: [Openapi_V3_NamedAny] = []
+
+    static let defaultInstance = _StorageClass()
+
+    private init() {}
+
+    init(copying source: _StorageClass) {
+      _summary = source._summary
+      _description_p = source._description_p
+      _value = source._value
+      _externalValue = source._externalValue
+      _specificationExtension = source._specificationExtension
+    }
+  }
+
+  fileprivate mutating func _uniqueStorage() -> _StorageClass {
+    if !isKnownUniquelyReferenced(&_storage) {
+      _storage = _StorageClass(copying: _storage)
+    }
+    return _storage
+  }
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_Example) -> Bool {
+    if _storage !== other._storage {
+      let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in
+        if _storage._summary != other_storage._summary {return false}
+        if _storage._description_p != other_storage._description_p {return false}
+        if _storage._value != other_storage._value {return false}
+        if _storage._externalValue != other_storage._externalValue {return false}
+        if _storage._specificationExtension != other_storage._specificationExtension {return false}
+        return true
+      }
+      if !storagesAreEqual {return false}
+    }
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_ExampleOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "example"),
+    2: .same(proto: "reference"),
+  ]
+
+  fileprivate class _StorageClass {
+    var _oneof: Openapi_V3_ExampleOrReference.OneOf_Oneof?
+
+    static let defaultInstance = _StorageClass()
+
+    private init() {}
+
+    init(copying source: _StorageClass) {
+      _oneof = source._oneof
+    }
+  }
+
+  fileprivate mutating func _uniqueStorage() -> _StorageClass {
+    if !isKnownUniquelyReferenced(&_storage) {
+      _storage = _StorageClass(copying: _storage)
+    }
+    return _storage
+  }
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_ExampleOrReference) -> Bool {
+    if _storage !== other._storage {
+      let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in
+        if _storage._oneof != other_storage._oneof {return false}
+        return true
+      }
+      if !storagesAreEqual {return false}
+    }
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_Examples: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap = SwiftProtobuf._NameMap()
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_Examples) -> Bool {
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_ExamplesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "additional_properties"),
+  ]
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_ExamplesOrReferences) -> Bool {
+    if self.additionalProperties != other.additionalProperties {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_Expression: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "additional_properties"),
+  ]
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_Expression) -> Bool {
+    if self.additionalProperties != other.additionalProperties {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_ExternalDocs: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "description"),
+    2: .same(proto: "url"),
+    3: .standard(proto: "specification_extension"),
+  ]
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_ExternalDocs) -> Bool {
+    if self.description_p != other.description_p {return false}
+    if self.url != other.url {return false}
+    if self.specificationExtension != other.specificationExtension {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_Header: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "description"),
+    2: .same(proto: "required"),
+    3: .same(proto: "deprecated"),
+    4: .standard(proto: "allow_empty_value"),
+    5: .same(proto: "style"),
+    6: .same(proto: "explode"),
+    7: .standard(proto: "allow_reserved"),
+    8: .same(proto: "schema"),
+    9: .same(proto: "example"),
+    10: .same(proto: "examples"),
+    11: .same(proto: "content"),
+    12: .standard(proto: "specification_extension"),
+  ]
+
+  fileprivate class _StorageClass {
+    var _description_p: String = String()
+    var _required: Bool = false
+    var _deprecated: Bool = false
+    var _allowEmptyValue: Bool = false
+    var _style: String = String()
+    var _explode: Bool = false
+    var _allowReserved: Bool = false
+    var _schema: Openapi_V3_SchemaOrReference? = nil
+    var _example: Openapi_V3_Any? = nil
+    var _examples: Openapi_V3_ExamplesOrReferences? = nil
+    var _content: Openapi_V3_MediaTypes? = nil
+    var _specificationExtension: [Openapi_V3_NamedAny] = []
+
+    static let defaultInstance = _StorageClass()
+
+    private init() {}
+
+    init(copying source: _StorageClass) {
+      _description_p = source._description_p
+      _required = source._required
+      _deprecated = source._deprecated
+      _allowEmptyValue = source._allowEmptyValue
+      _style = source._style
+      _explode = source._explode
+      _allowReserved = source._allowReserved
+      _schema = source._schema
+      _example = source._example
+      _examples = source._examples
+      _content = source._content
+      _specificationExtension = source._specificationExtension
+    }
+  }
+
+  fileprivate mutating func _uniqueStorage() -> _StorageClass {
+    if !isKnownUniquelyReferenced(&_storage) {
+      _storage = _StorageClass(copying: _storage)
+    }
+    return _storage
+  }
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_Header) -> Bool {
+    if _storage !== other._storage {
+      let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in
+        if _storage._description_p != other_storage._description_p {return false}
+        if _storage._required != other_storage._required {return false}
+        if _storage._deprecated != other_storage._deprecated {return false}
+        if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false}
+        if _storage._style != other_storage._style {return false}
+        if _storage._explode != other_storage._explode {return false}
+        if _storage._allowReserved != other_storage._allowReserved {return false}
+        if _storage._schema != other_storage._schema {return false}
+        if _storage._example != other_storage._example {return false}
+        if _storage._examples != other_storage._examples {return false}
+        if _storage._content != other_storage._content {return false}
+        if _storage._specificationExtension != other_storage._specificationExtension {return false}
+        return true
+      }
+      if !storagesAreEqual {return false}
+    }
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_HeaderOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "header"),
+    2: .same(proto: "reference"),
+  ]
+
+  fileprivate class _StorageClass {
+    var _oneof: Openapi_V3_HeaderOrReference.OneOf_Oneof?
+
+    static let defaultInstance = _StorageClass()
+
+    private init() {}
+
+    init(copying source: _StorageClass) {
+      _oneof = source._oneof
+    }
+  }
+
+  fileprivate mutating func _uniqueStorage() -> _StorageClass {
+    if !isKnownUniquelyReferenced(&_storage) {
+      _storage = _StorageClass(copying: _storage)
+    }
+    return _storage
+  }
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_HeaderOrReference) -> Bool {
+    if _storage !== other._storage {
+      let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in
+        if _storage._oneof != other_storage._oneof {return false}
+        return true
+      }
+      if !storagesAreEqual {return false}
+    }
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_HeadersOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .standard(proto: "additional_properties"),
+  ]
+
+  public func _protobuf_generated_isEqualTo(other: Openapi_V3_HeadersOrReferences) -> Bool {
+    if self.additionalProperties != other.additionalProperties {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Openapi_V3_Info: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "title"),
+    2: .same(proto: "description"),
+    3: .standard(proto: "terms_of_service"),
+    4: .same(proto: "contact"),
+    5: .same(proto: "license"),
+    6: .same(proto: "version"),
+    7: .standard(proto: "specification_extension"),
+  ]
+
+  fileprivate class _StorageClass {
+    var _title: String = String()
+    var _description_p: String = String()
+    var _termsOfService: String = String()
+    var _contact: Openapi_V3_Contact? = nil
+    var _license: Openapi_V3_License?
= nil + var _version: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _title = source._title + _description_p = source._description_p + _termsOfService = source._termsOfService + _contact = source._contact + _license = source._license + _version = source._version + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Info) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._termsOfService != other_storage._termsOfService {return false} + if _storage._contact != other_storage._contact {return false} + if _storage._license != other_storage._license {return false} + if _storage._version != other_storage._version {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ItemsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "schema_or_reference"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ItemsItem) -> Bool { + if self.schemaOrReference != other.schemaOrReference {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_License: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_License) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Link: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "operation_ref"), + 2: .standard(proto: "operation_id"), + 3: .same(proto: "parameters"), + 4: .standard(proto: "request_body"), + 5: .same(proto: "description"), + 6: .same(proto: "server"), + 7: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _operationRef: String = String() + var _operationID: String = String() + var _parameters: Openapi_V3_AnysOrExpressions? = nil + var _requestBody: Openapi_V3_AnyOrExpression? = nil + var _description_p: String = String() + var _server: Openapi_V3_Server? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _operationRef = source._operationRef + _operationID = source._operationID + _parameters = source._parameters + _requestBody = source._requestBody + _description_p = source._description_p + _server = source._server + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Link) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._operationRef != other_storage._operationRef {return false} + if _storage._operationID != other_storage._operationID {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._requestBody != other_storage._requestBody {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._server != other_storage._server {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_LinkOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "link"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_LinkOrReference.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_LinkOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_LinksOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_LinksOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_MediaType: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .same(proto: "example"), + 3: .same(proto: "examples"), + 4: .same(proto: "encoding"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _schema: Openapi_V3_SchemaOrReference? 
= nil + var _example: Openapi_V3_Any? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _encoding: Openapi_V3_Encodings? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _schema = source._schema + _example = source._example + _examples = source._examples + _encoding = source._encoding + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_MediaType) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._schema != other_storage._schema {return false} + if _storage._example != other_storage._example {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._encoding != other_storage._encoding {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_MediaTypes: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_MediaTypes) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedAny: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_Any? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedAny) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedAnyOrExpression: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_AnyOrExpression? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedAnyOrExpression) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedCallbackOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_CallbackOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedCallbackOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedEncoding: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_Encoding? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedEncoding) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedExampleOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ExampleOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedExampleOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedHeaderOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_HeaderOrReference? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedHeaderOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedLinkOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_LinkOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedLinkOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedMediaType: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_MediaType? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedMediaType) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedParameterOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ParameterOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedParameterOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedPathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_PathItem? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedPathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedRequestBodyOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_RequestBodyOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedRequestBodyOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedResponseOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ResponseOrReference? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedResponseOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedSchemaOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_SchemaOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedSchemaOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedSecuritySchemeOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_SecuritySchemeOrReference? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedSecuritySchemeOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedServerVariable: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ServerVariable? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedServerVariable) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedString: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedString) -> Bool { + if self.name != other.name {return false} + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_OauthFlow: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "authorization_url"), + 2: .standard(proto: "token_url"), + 3: .standard(proto: "refresh_url"), + 4: .same(proto: "scopes"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _authorizationURL: String = String() + var _tokenURL: String = String() + var _refreshURL: String = String() + var _scopes: Openapi_V3_Strings? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _authorizationURL = source._authorizationURL + _tokenURL = source._tokenURL + _refreshURL = source._refreshURL + _scopes = source._scopes + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_OauthFlow) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._authorizationURL != other_storage._authorizationURL {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._refreshURL != other_storage._refreshURL {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_OauthFlows: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "implicit"), + 2: .same(proto: "password"), + 3: .standard(proto: "client_credentials"), + 4: .standard(proto: "authorization_code"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _implicit: Openapi_V3_OauthFlow? = nil + var _password: Openapi_V3_OauthFlow? = nil + var _clientCredentials: Openapi_V3_OauthFlow? = nil + var _authorizationCode: Openapi_V3_OauthFlow? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _implicit = source._implicit + _password = source._password + _clientCredentials = source._clientCredentials + _authorizationCode = source._authorizationCode + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_OauthFlows) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._implicit != other_storage._implicit {return false} + if _storage._password != other_storage._password {return false} + if _storage._clientCredentials != other_storage._clientCredentials {return false} + if _storage._authorizationCode != other_storage._authorizationCode {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Object: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Object) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Operation: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "tags"), + 2: .same(proto: "summary"), + 3: .same(proto: "description"), + 4: .standard(proto: "external_docs"), + 5: .standard(proto: "operation_id"), + 6: .same(proto: "parameters"), + 7: .standard(proto: "request_body"), + 8: .same(proto: "responses"), + 9: .same(proto: "callbacks"), + 10: .same(proto: "deprecated"), + 11: .same(proto: "security"), + 12: .same(proto: "servers"), + 13: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _tags: [String] = [] + var _summary: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V3_ExternalDocs? = nil + var _operationID: String = String() + var _parameters: [Openapi_V3_ParameterOrReference] = [] + var _requestBody: Openapi_V3_RequestBodyOrReference? = nil + var _responses: Openapi_V3_Responses? = nil + var _callbacks: Openapi_V3_CallbacksOrReferences? 
= nil + var _deprecated: Bool = false + var _security: [Openapi_V3_SecurityRequirement] = [] + var _servers: [Openapi_V3_Server] = [] + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _tags = source._tags + _summary = source._summary + _description_p = source._description_p + _externalDocs = source._externalDocs + _operationID = source._operationID + _parameters = source._parameters + _requestBody = source._requestBody + _responses = source._responses + _callbacks = source._callbacks + _deprecated = source._deprecated + _security = source._security + _servers = source._servers + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Operation) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._tags != other_storage._tags {return false} + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._operationID != other_storage._operationID {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._requestBody != other_storage._requestBody {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._callbacks != other_storage._callbacks {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._security != other_storage._security {return false} + if _storage._servers != other_storage._servers {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "required"), + 5: .same(proto: "deprecated"), + 6: .standard(proto: "allow_empty_value"), + 7: .same(proto: "style"), + 8: .same(proto: "explode"), + 9: .standard(proto: "allow_reserved"), + 10: .same(proto: "schema"), + 11: .same(proto: "example"), + 12: .same(proto: "examples"), + 13: .same(proto: "content"), + 14: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _in: String = String() + var _description_p: String = String() + var _required: Bool = false + var _deprecated: Bool = false + var _allowEmptyValue: Bool = false + var _style: String = String() + var _explode: Bool = false + var _allowReserved: Bool = false + var _schema: Openapi_V3_SchemaOrReference? = nil + var _example: Openapi_V3_Any? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _content: Openapi_V3_MediaTypes? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _in = source._in + _description_p = source._description_p + _required = source._required + _deprecated = source._deprecated + _allowEmptyValue = source._allowEmptyValue + _style = source._style + _explode = source._explode + _allowReserved = source._allowReserved + _schema = source._schema + _example = source._example + _examples = source._examples + _content = source._content + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Parameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._required != other_storage._required {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._style != other_storage._style {return false} + if _storage._explode != other_storage._explode {return false} + if _storage._allowReserved != other_storage._allowReserved {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._example != other_storage._example {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._content != other_storage._content {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ParameterOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "parameter"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_ParameterOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ParameterOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ParametersOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ParametersOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_PathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "summary"), + 3: .same(proto: "description"), + 4: .same(proto: "get"), + 5: .same(proto: "put"), + 6: .same(proto: "post"), + 7: .same(proto: "delete"), + 8: .same(proto: "options"), + 9: .same(proto: "head"), + 10: .same(proto: "patch"), + 11: .same(proto: "trace"), + 12: .same(proto: "servers"), + 13: .same(proto: "parameters"), + 14: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _ref: String = String() + var _summary: String = String() + var _description_p: String = String() + var _get: Openapi_V3_Operation? = nil + var _put: Openapi_V3_Operation? = nil + var _post: Openapi_V3_Operation? = nil + var _delete: Openapi_V3_Operation? = nil + var _options: Openapi_V3_Operation? = nil + var _head: Openapi_V3_Operation? = nil + var _patch: Openapi_V3_Operation? = nil + var _trace: Openapi_V3_Operation? 
= nil + var _servers: [Openapi_V3_Server] = [] + var _parameters: [Openapi_V3_ParameterOrReference] = [] + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _ref = source._ref + _summary = source._summary + _description_p = source._description_p + _get = source._get + _put = source._put + _post = source._post + _delete = source._delete + _options = source._options + _head = source._head + _patch = source._patch + _trace = source._trace + _servers = source._servers + _parameters = source._parameters + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_PathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._ref != other_storage._ref {return false} + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._get != other_storage._get {return false} + if _storage._put != other_storage._put {return false} + if _storage._post != other_storage._post {return false} + if _storage._delete != other_storage._delete {return false} + if _storage._options != other_storage._options {return false} + if _storage._head != other_storage._head {return false} + if _storage._patch != other_storage._patch {return false} + if _storage._trace != other_storage._trace {return false} + if _storage._servers != other_storage._servers {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Paths: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "path"), + 2: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Paths) -> Bool { + if self.path != other.path {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Properties: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Properties) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Reference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Reference) -> Bool { + if self.ref != other.ref {return false} + if unknownFields != other.unknownFields {return false} + 
return true + } +} + +extension Openapi_V3_RequestBodiesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_RequestBodiesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_RequestBody: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "content"), + 3: .same(proto: "required"), + 4: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _content: Openapi_V3_MediaTypes? = nil + var _required: Bool = false + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _content = source._content + _required = source._required + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_RequestBody) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._content != other_storage._content {return false} + if _storage._required != other_storage._required {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_RequestBodyOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "request_body"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_RequestBodyOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_RequestBodyOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "headers"), + 3: .same(proto: "content"), + 4: .same(proto: "links"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _headers: Openapi_V3_HeadersOrReferences? = nil + var _content: Openapi_V3_MediaTypes? = nil + var _links: Openapi_V3_LinksOrReferences? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _headers = source._headers + _content = source._content + _links = source._links + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Response) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._content != other_storage._content {return false} + if _storage._links != other_storage._links {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ResponseOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "response"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_ResponseOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ResponseOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Responses: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "default"), + 2: .standard(proto: "response_or_reference"), + 3: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _default: Openapi_V3_ResponseOrReference? = nil + var _responseOrReference: [Openapi_V3_NamedResponseOrReference] = [] + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _default = source._default + _responseOrReference = source._responseOrReference + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Responses) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._default != other_storage._default {return false} + if _storage._responseOrReference != other_storage._responseOrReference {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ResponsesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ResponsesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Schema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "nullable"), + 2: .same(proto: "discriminator"), + 3: .standard(proto: "read_only"), + 4: .standard(proto: "write_only"), + 5: .same(proto: "xml"), + 6: .standard(proto: "external_docs"), + 7: .same(proto: "example"), + 8: .same(proto: "deprecated"), + 9: .same(proto: "title"), + 10: .standard(proto: "multiple_of"), + 11: .same(proto: "maximum"), + 12: .standard(proto: "exclusive_maximum"), + 13: .same(proto: "minimum"), + 14: .standard(proto: "exclusive_minimum"), + 15: .standard(proto: 
"max_length"), + 16: .standard(proto: "min_length"), + 17: .same(proto: "pattern"), + 18: .standard(proto: "max_items"), + 19: .standard(proto: "min_items"), + 20: .standard(proto: "unique_items"), + 21: .standard(proto: "max_properties"), + 22: .standard(proto: "min_properties"), + 23: .same(proto: "required"), + 24: .same(proto: "enum"), + 25: .same(proto: "type"), + 26: .standard(proto: "all_of"), + 27: .standard(proto: "one_of"), + 28: .standard(proto: "any_of"), + 29: .same(proto: "not"), + 30: .same(proto: "items"), + 31: .same(proto: "properties"), + 32: .standard(proto: "additional_properties"), + 33: .same(proto: "default"), + 34: .same(proto: "description"), + 35: .same(proto: "format"), + 36: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _nullable: Bool = false + var _discriminator: Openapi_V3_Discriminator? = nil + var _readOnly: Bool = false + var _writeOnly: Bool = false + var _xml: Openapi_V3_Xml? = nil + var _externalDocs: Openapi_V3_ExternalDocs? = nil + var _example: Openapi_V3_Any? = nil + var _deprecated: Bool = false + var _title: String = String() + var _multipleOf: Double = 0 + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _maxProperties: Int64 = 0 + var _minProperties: Int64 = 0 + var _required: [String] = [] + var _enum: [Openapi_V3_Any] = [] + var _type: String = String() + var _allOf: [Openapi_V3_SchemaOrReference] = [] + var _oneOf: [Openapi_V3_SchemaOrReference] = [] + var _anyOf: [Openapi_V3_SchemaOrReference] = [] + var _not: Openapi_V3_Schema? = nil + var _items: Openapi_V3_ItemsItem? = nil + var _properties: Openapi_V3_Properties? = nil + var _additionalProperties: Openapi_V3_AdditionalPropertiesItem? = nil + var _default: Openapi_V3_DefaultType? 
= nil + var _description_p: String = String() + var _format: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _nullable = source._nullable + _discriminator = source._discriminator + _readOnly = source._readOnly + _writeOnly = source._writeOnly + _xml = source._xml + _externalDocs = source._externalDocs + _example = source._example + _deprecated = source._deprecated + _title = source._title + _multipleOf = source._multipleOf + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _maxProperties = source._maxProperties + _minProperties = source._minProperties + _required = source._required + _enum = source._enum + _type = source._type + _allOf = source._allOf + _oneOf = source._oneOf + _anyOf = source._anyOf + _not = source._not + _items = source._items + _properties = source._properties + _additionalProperties = source._additionalProperties + _default = source._default + _description_p = source._description_p + _format = source._format + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Schema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._nullable != other_storage._nullable {return false} + if _storage._discriminator != other_storage._discriminator {return false} + if _storage._readOnly != other_storage._readOnly {return false} + if _storage._writeOnly != other_storage._writeOnly {return false} + if _storage._xml != other_storage._xml {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._example != other_storage._example {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._title != other_storage._title {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._maxProperties != other_storage._maxProperties {return false} + if _storage._minProperties != other_storage._minProperties {return false} + if _storage._required != other_storage._required {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._type != other_storage._type {return 
false} + if _storage._allOf != other_storage._allOf {return false} + if _storage._oneOf != other_storage._oneOf {return false} + if _storage._anyOf != other_storage._anyOf {return false} + if _storage._not != other_storage._not {return false} + if _storage._items != other_storage._items {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._default != other_storage._default {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._format != other_storage._format {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SchemaOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_SchemaOrReference.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SchemaOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SchemasOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SchemasOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecurityRequirement: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap = SwiftProtobuf._NameMap() + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecurityRequirement) -> Bool { + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecurityScheme: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "description"), + 3: .same(proto: "name"), + 4: .same(proto: "in"), + 5: .same(proto: "scheme"), + 6: .standard(proto: "bearer_format"), + 7: .same(proto: "flows"), + 8: .standard(proto: "open_id_connect_url"), + 9: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _description_p: String = String() + var _name: String = String() + var _in: String = String() + var _scheme: String = 
String() + var _bearerFormat: String = String() + var _flows: Openapi_V3_OauthFlows? = nil + var _openIDConnectURL: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _description_p = source._description_p + _name = source._name + _in = source._in + _scheme = source._scheme + _bearerFormat = source._bearerFormat + _flows = source._flows + _openIDConnectURL = source._openIDConnectURL + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecurityScheme) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._in != other_storage._in {return false} + if _storage._scheme != other_storage._scheme {return false} + if _storage._bearerFormat != other_storage._bearerFormat {return false} + if _storage._flows != other_storage._flows {return false} + if _storage._openIDConnectURL != other_storage._openIDConnectURL {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecuritySchemeOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "security_scheme"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_SecuritySchemeOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecuritySchemeOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecuritySchemesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecuritySchemesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Server: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "url"), + 2: .same(proto: "description"), + 3: .same(proto: "variables"), + 4: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _url: String = String() + var _description_p: String = String() + var _variables: Openapi_V3_ServerVariables? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _url = source._url + _description_p = source._description_p + _variables = source._variables + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Server) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._url != other_storage._url {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._variables != other_storage._variables {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ServerVariable: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "enum"), + 2: .same(proto: "default"), + 3: .same(proto: "description"), + 4: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ServerVariable) -> Bool { + if self.`enum` != other.`enum` {return false} + if self.`default` != other.`default` {return false} + if self.description_p != other.description_p {return false} + if self.specificationExtension 
!= other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ServerVariables: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ServerVariables) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SpecificationExtension: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "number"), + 2: .same(proto: "boolean"), + 3: .same(proto: "string"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SpecificationExtension) -> Bool { + if self.oneof != other.oneof {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_StringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_StringArray) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Strings: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Strings) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Tag: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "description"), + 3: .standard(proto: "external_docs"), + 4: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V3_ExternalDocs? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _description_p = source._description_p + _externalDocs = source._externalDocs + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Tag) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Xml: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "namespace"), + 3: .same(proto: "prefix"), + 4: .same(proto: "attribute"), + 5: .same(proto: "wrapped"), + 6: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Xml) -> Bool { + if self.name != other.name {return false} + if self.namespace != other.namespace {return false} + if self.prefix != other.prefix {return false} + if self.attribute != other.attribute {return false} + if self.wrapped != other.wrapped {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/discovery.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/discovery.pb.swift new file mode 100644 index 000000000..dcca2ce0c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/discovery.pb.swift @@ -0,0 +1,3195 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/discovery/discovery.proto +// +// For information on using the generated types, please see the documenation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. 
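For reference, the generated Discovery_V1 types added below are ordinary SwiftProtobuf messages, so they can be parsed and serialized with the standard APIs already referenced in the generated comments (`init(serializedData:)`, `serializedData()`, `jsonUTF8Data()`). A minimal usage sketch, not part of the vendored file, assuming the SwiftProtobuf package and this generated module are linked into the caller's target:

import Foundation
import SwiftProtobuf

// Round-trip one of the generated messages through the protobuf binary wire
// format and print its JSON form. Sketch only; error handling is minimal.
func roundTripExample() throws {
    var annotations = Discovery_V1_Annotations()
    annotations.required = ["name", "version"]

    let wire = try annotations.serializedData()                      // binary protobuf bytes
    let decoded = try Discovery_V1_Annotations(serializedData: wire) // parse them back
    assert(decoded.required == annotations.required)

    let json = try decoded.jsonUTF8Data()                            // JSON encoding
    print(String(data: json, encoding: .utf8) ?? "")
}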
+ +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that your are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public struct Discovery_V1_Annotations: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Annotations" + + public var required: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.required) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.required.isEmpty { + try visitor.visitRepeatedStringField(value: self.required, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Any: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Any" + + public var value: SwiftProtobuf.Google_Protobuf_Any { + get {return _storage._value ?? SwiftProtobuf.Google_Protobuf_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var yaml: String { + get {return _storage._yaml} + set {_uniqueStorage()._yaml = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._value) + case 2: try decoder.decodeSingularStringField(value: &_storage._yaml) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._yaml.isEmpty { + try visitor.visitSingularStringField(value: _storage._yaml, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Auth: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Auth" + + public var oauth2: Discovery_V1_Oauth2 { + get {return _storage._oauth2 ?? Discovery_V1_Oauth2()} + set {_uniqueStorage()._oauth2 = newValue} + } + /// Returns true if `oauth2` has been explicitly set. + public var hasOauth2: Bool {return _storage._oauth2 != nil} + /// Clears the value of `oauth2`. Subsequent reads from it will return its default value. + public mutating func clearOauth2() {_storage._oauth2 = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._oauth2) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._oauth2 { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Document: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Document" + + public var kind: String { + get {return _storage._kind} + set {_uniqueStorage()._kind = newValue} + } + + public var discoveryVersion: String { + get {return _storage._discoveryVersion} + set {_uniqueStorage()._discoveryVersion = newValue} + } + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var version: String { + get {return _storage._version} + set {_uniqueStorage()._version = newValue} + } + + public var revision: String { + get {return _storage._revision} + set {_uniqueStorage()._revision = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var icons: Discovery_V1_Icons { + get {return _storage._icons ?? Discovery_V1_Icons()} + set {_uniqueStorage()._icons = newValue} + } + /// Returns true if `icons` has been explicitly set. + public var hasIcons: Bool {return _storage._icons != nil} + /// Clears the value of `icons`. Subsequent reads from it will return its default value. + public mutating func clearIcons() {_storage._icons = nil} + + public var documentationLink: String { + get {return _storage._documentationLink} + set {_uniqueStorage()._documentationLink = newValue} + } + + public var labels: [String] { + get {return _storage._labels} + set {_uniqueStorage()._labels = newValue} + } + + public var `protocol`: String { + get {return _storage._protocol} + set {_uniqueStorage()._protocol = newValue} + } + + public var baseURL: String { + get {return _storage._baseURL} + set {_uniqueStorage()._baseURL = newValue} + } + + public var basePath: String { + get {return _storage._basePath} + set {_uniqueStorage()._basePath = newValue} + } + + public var rootURL: String { + get {return _storage._rootURL} + set {_uniqueStorage()._rootURL = newValue} + } + + public var servicePath: String { + get {return _storage._servicePath} + set {_uniqueStorage()._servicePath = newValue} + } + + public var batchPath: String { + get {return _storage._batchPath} + set {_uniqueStorage()._batchPath = newValue} + } + + public var parameters: Discovery_V1_Parameters { + get {return _storage._parameters ?? Discovery_V1_Parameters()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var auth: Discovery_V1_Auth { + get {return _storage._auth ?? Discovery_V1_Auth()} + set {_uniqueStorage()._auth = newValue} + } + /// Returns true if `auth` has been explicitly set. + public var hasAuth: Bool {return _storage._auth != nil} + /// Clears the value of `auth`. 
Subsequent reads from it will return its default value. + public mutating func clearAuth() {_storage._auth = nil} + + public var features: [String] { + get {return _storage._features} + set {_uniqueStorage()._features = newValue} + } + + public var schemas: Discovery_V1_Schemas { + get {return _storage._schemas ?? Discovery_V1_Schemas()} + set {_uniqueStorage()._schemas = newValue} + } + /// Returns true if `schemas` has been explicitly set. + public var hasSchemas: Bool {return _storage._schemas != nil} + /// Clears the value of `schemas`. Subsequent reads from it will return its default value. + public mutating func clearSchemas() {_storage._schemas = nil} + + public var methods: Discovery_V1_Methods { + get {return _storage._methods ?? Discovery_V1_Methods()} + set {_uniqueStorage()._methods = newValue} + } + /// Returns true if `methods` has been explicitly set. + public var hasMethods: Bool {return _storage._methods != nil} + /// Clears the value of `methods`. Subsequent reads from it will return its default value. + public mutating func clearMethods() {_storage._methods = nil} + + public var resources: Discovery_V1_Resources { + get {return _storage._resources ?? Discovery_V1_Resources()} + set {_uniqueStorage()._resources = newValue} + } + /// Returns true if `resources` has been explicitly set. + public var hasResources: Bool {return _storage._resources != nil} + /// Clears the value of `resources`. Subsequent reads from it will return its default value. + public mutating func clearResources() {_storage._resources = nil} + + public var etag: String { + get {return _storage._etag} + set {_uniqueStorage()._etag = newValue} + } + + public var ownerDomain: String { + get {return _storage._ownerDomain} + set {_uniqueStorage()._ownerDomain = newValue} + } + + public var ownerName: String { + get {return _storage._ownerName} + set {_uniqueStorage()._ownerName = newValue} + } + + public var versionModule: Bool { + get {return _storage._versionModule} + set {_uniqueStorage()._versionModule = newValue} + } + + public var canonicalName: String { + get {return _storage._canonicalName} + set {_uniqueStorage()._canonicalName = newValue} + } + + public var fullyEncodeReservedExpansion: Bool { + get {return _storage._fullyEncodeReservedExpansion} + set {_uniqueStorage()._fullyEncodeReservedExpansion = newValue} + } + + public var packagePath: String { + get {return _storage._packagePath} + set {_uniqueStorage()._packagePath = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._kind) + case 2: try decoder.decodeSingularStringField(value: &_storage._discoveryVersion) + case 3: try decoder.decodeSingularStringField(value: &_storage._id) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularStringField(value: &_storage._version) + case 6: try decoder.decodeSingularStringField(value: &_storage._revision) + case 7: try decoder.decodeSingularStringField(value: &_storage._title) + case 8: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 9: try decoder.decodeSingularMessageField(value: &_storage._icons) + case 10: try decoder.decodeSingularStringField(value: &_storage._documentationLink) + case 11: try decoder.decodeRepeatedStringField(value: &_storage._labels) + case 12: try decoder.decodeSingularStringField(value: &_storage._protocol) + case 13: try decoder.decodeSingularStringField(value: &_storage._baseURL) + case 14: try decoder.decodeSingularStringField(value: &_storage._basePath) + case 15: try decoder.decodeSingularStringField(value: &_storage._rootURL) + case 16: try decoder.decodeSingularStringField(value: &_storage._servicePath) + case 17: try decoder.decodeSingularStringField(value: &_storage._batchPath) + case 18: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 19: try decoder.decodeSingularMessageField(value: &_storage._auth) + case 20: try decoder.decodeRepeatedStringField(value: &_storage._features) + case 21: try decoder.decodeSingularMessageField(value: &_storage._schemas) + case 22: try decoder.decodeSingularMessageField(value: &_storage._methods) + case 23: try decoder.decodeSingularMessageField(value: &_storage._resources) + case 24: try decoder.decodeSingularStringField(value: &_storage._etag) + case 25: try decoder.decodeSingularStringField(value: &_storage._ownerDomain) + case 26: try decoder.decodeSingularStringField(value: &_storage._ownerName) + case 27: try decoder.decodeSingularBoolField(value: &_storage._versionModule) + case 28: try decoder.decodeSingularStringField(value: &_storage._canonicalName) + case 29: try decoder.decodeSingularBoolField(value: &_storage._fullyEncodeReservedExpansion) + case 30: try decoder.decodeSingularStringField(value: &_storage._packagePath) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._kind.isEmpty { + try visitor.visitSingularStringField(value: _storage._kind, fieldNumber: 1) + } + if !_storage._discoveryVersion.isEmpty { + try visitor.visitSingularStringField(value: _storage._discoveryVersion, fieldNumber: 2) + } + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if !_storage._version.isEmpty { + try visitor.visitSingularStringField(value: _storage._version, fieldNumber: 5) + } + if !_storage._revision.isEmpty { + try visitor.visitSingularStringField(value: _storage._revision, fieldNumber: 6) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 7) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 8) + } + if let v = _storage._icons { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._documentationLink.isEmpty { + try visitor.visitSingularStringField(value: _storage._documentationLink, fieldNumber: 10) + } + if !_storage._labels.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._labels, fieldNumber: 11) + } + if !_storage._protocol.isEmpty { + try visitor.visitSingularStringField(value: _storage._protocol, fieldNumber: 12) + } + if !_storage._baseURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._baseURL, fieldNumber: 13) + } + if !_storage._basePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._basePath, fieldNumber: 14) + } + if !_storage._rootURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._rootURL, fieldNumber: 15) + } + if !_storage._servicePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._servicePath, fieldNumber: 16) + } + if !_storage._batchPath.isEmpty { + try visitor.visitSingularStringField(value: _storage._batchPath, fieldNumber: 17) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 18) + } + if let v = _storage._auth { + try visitor.visitSingularMessageField(value: v, fieldNumber: 19) + } + if !_storage._features.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._features, fieldNumber: 20) + } + if let v = _storage._schemas { + try visitor.visitSingularMessageField(value: v, fieldNumber: 21) + } + if let v = _storage._methods { + try visitor.visitSingularMessageField(value: v, fieldNumber: 22) + } + if let v = _storage._resources { + try visitor.visitSingularMessageField(value: v, fieldNumber: 23) + } + if !_storage._etag.isEmpty { + try visitor.visitSingularStringField(value: _storage._etag, fieldNumber: 24) + } + if !_storage._ownerDomain.isEmpty { + try visitor.visitSingularStringField(value: _storage._ownerDomain, fieldNumber: 25) + } + if !_storage._ownerName.isEmpty { + try visitor.visitSingularStringField(value: _storage._ownerName, fieldNumber: 26) + } + if _storage._versionModule != false { + try visitor.visitSingularBoolField(value: _storage._versionModule, fieldNumber: 27) + } + if !_storage._canonicalName.isEmpty { + try visitor.visitSingularStringField(value: _storage._canonicalName, fieldNumber: 28) + } + if _storage._fullyEncodeReservedExpansion != false { + try visitor.visitSingularBoolField(value: 
_storage._fullyEncodeReservedExpansion, fieldNumber: 29) + } + if !_storage._packagePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._packagePath, fieldNumber: 30) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Icons that represent the API. +public struct Discovery_V1_Icons: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Icons" + + public var x16: String = String() + + public var x32: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.x16) + case 2: try decoder.decodeSingularStringField(value: &self.x32) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.x16.isEmpty { + try visitor.visitSingularStringField(value: self.x16, fieldNumber: 1) + } + if !self.x32.isEmpty { + try visitor.visitSingularStringField(value: self.x32, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_MediaUpload: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".MediaUpload" + + public var accept: [String] { + get {return _storage._accept} + set {_uniqueStorage()._accept = newValue} + } + + public var maxSize: String { + get {return _storage._maxSize} + set {_uniqueStorage()._maxSize = newValue} + } + + public var protocols: Discovery_V1_Protocols { + get {return _storage._protocols ?? Discovery_V1_Protocols()} + set {_uniqueStorage()._protocols = newValue} + } + /// Returns true if `protocols` has been explicitly set. + public var hasProtocols: Bool {return _storage._protocols != nil} + /// Clears the value of `protocols`. Subsequent reads from it will return its default value. + public mutating func clearProtocols() {_storage._protocols = nil} + + public var supportsSubscription: Bool { + get {return _storage._supportsSubscription} + set {_uniqueStorage()._supportsSubscription = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &_storage._accept) + case 2: try decoder.decodeSingularStringField(value: &_storage._maxSize) + case 3: try decoder.decodeSingularMessageField(value: &_storage._protocols) + case 4: try decoder.decodeSingularBoolField(value: &_storage._supportsSubscription) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._accept.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._accept, fieldNumber: 1) + } + if !_storage._maxSize.isEmpty { + try visitor.visitSingularStringField(value: _storage._maxSize, fieldNumber: 2) + } + if let v = _storage._protocols { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if _storage._supportsSubscription != false { + try visitor.visitSingularBoolField(value: _storage._supportsSubscription, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Method: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Method" + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var path: String { + get {return _storage._path} + set {_uniqueStorage()._path = newValue} + } + + public var httpMethod: String { + get {return _storage._httpMethod} + set {_uniqueStorage()._httpMethod = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var parameters: Discovery_V1_Parameters { + get {return _storage._parameters ?? Discovery_V1_Parameters()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var parameterOrder: [String] { + get {return _storage._parameterOrder} + set {_uniqueStorage()._parameterOrder = newValue} + } + + public var request: Discovery_V1_Request { + get {return _storage._request ?? Discovery_V1_Request()} + set {_uniqueStorage()._request = newValue} + } + /// Returns true if `request` has been explicitly set. + public var hasRequest: Bool {return _storage._request != nil} + /// Clears the value of `request`. Subsequent reads from it will return its default value. + public mutating func clearRequest() {_storage._request = nil} + + public var response: Discovery_V1_Response { + get {return _storage._response ?? Discovery_V1_Response()} + set {_uniqueStorage()._response = newValue} + } + /// Returns true if `response` has been explicitly set. + public var hasResponse: Bool {return _storage._response != nil} + /// Clears the value of `response`. 
Subsequent reads from it will return its default value. + public mutating func clearResponse() {_storage._response = nil} + + public var scopes: [String] { + get {return _storage._scopes} + set {_uniqueStorage()._scopes = newValue} + } + + public var supportsMediaDownload: Bool { + get {return _storage._supportsMediaDownload} + set {_uniqueStorage()._supportsMediaDownload = newValue} + } + + public var supportsMediaUpload: Bool { + get {return _storage._supportsMediaUpload} + set {_uniqueStorage()._supportsMediaUpload = newValue} + } + + public var useMediaDownloadService: Bool { + get {return _storage._useMediaDownloadService} + set {_uniqueStorage()._useMediaDownloadService = newValue} + } + + public var mediaUpload: Discovery_V1_MediaUpload { + get {return _storage._mediaUpload ?? Discovery_V1_MediaUpload()} + set {_uniqueStorage()._mediaUpload = newValue} + } + /// Returns true if `mediaUpload` has been explicitly set. + public var hasMediaUpload: Bool {return _storage._mediaUpload != nil} + /// Clears the value of `mediaUpload`. Subsequent reads from it will return its default value. + public mutating func clearMediaUpload() {_storage._mediaUpload = nil} + + public var supportsSubscription: Bool { + get {return _storage._supportsSubscription} + set {_uniqueStorage()._supportsSubscription = newValue} + } + + public var flatPath: String { + get {return _storage._flatPath} + set {_uniqueStorage()._flatPath = newValue} + } + + public var etagRequired: Bool { + get {return _storage._etagRequired} + set {_uniqueStorage()._etagRequired = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._id) + case 2: try decoder.decodeSingularStringField(value: &_storage._path) + case 3: try decoder.decodeSingularStringField(value: &_storage._httpMethod) + case 4: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 5: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 6: try decoder.decodeRepeatedStringField(value: &_storage._parameterOrder) + case 7: try decoder.decodeSingularMessageField(value: &_storage._request) + case 8: try decoder.decodeSingularMessageField(value: &_storage._response) + case 9: try decoder.decodeRepeatedStringField(value: &_storage._scopes) + case 10: try decoder.decodeSingularBoolField(value: &_storage._supportsMediaDownload) + case 11: try decoder.decodeSingularBoolField(value: &_storage._supportsMediaUpload) + case 12: try decoder.decodeSingularBoolField(value: &_storage._useMediaDownloadService) + case 13: try decoder.decodeSingularMessageField(value: &_storage._mediaUpload) + case 14: try decoder.decodeSingularBoolField(value: &_storage._supportsSubscription) + case 15: try decoder.decodeSingularStringField(value: &_storage._flatPath) + case 16: try decoder.decodeSingularBoolField(value: &_storage._etagRequired) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 1) + } + if !_storage._path.isEmpty { + try visitor.visitSingularStringField(value: _storage._path, fieldNumber: 2) + } + if !_storage._httpMethod.isEmpty { + try visitor.visitSingularStringField(value: _storage._httpMethod, fieldNumber: 3) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 4) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._parameterOrder.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._parameterOrder, fieldNumber: 6) + } + if let v = _storage._request { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._response { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._scopes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._scopes, fieldNumber: 9) + } + if _storage._supportsMediaDownload != false { + try visitor.visitSingularBoolField(value: _storage._supportsMediaDownload, fieldNumber: 10) + } + if _storage._supportsMediaUpload != false { + try visitor.visitSingularBoolField(value: _storage._supportsMediaUpload, fieldNumber: 11) + } + if _storage._useMediaDownloadService != false { + try visitor.visitSingularBoolField(value: _storage._useMediaDownloadService, fieldNumber: 12) + } + if let v = _storage._mediaUpload { + try visitor.visitSingularMessageField(value: v, fieldNumber: 13) + } + if _storage._supportsSubscription != false { + try visitor.visitSingularBoolField(value: _storage._supportsSubscription, fieldNumber: 14) + } + if !_storage._flatPath.isEmpty { + try visitor.visitSingularStringField(value: _storage._flatPath, fieldNumber: 15) + } + if _storage._etagRequired != false { + try visitor.visitSingularBoolField(value: _storage._etagRequired, fieldNumber: 16) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Methods: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Methods" + + public var additionalProperties: [Discovery_V1_NamedMethod] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of Method as ordered (name,value) pairs. +public struct Discovery_V1_NamedMethod: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedMethod" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Method { + get {return _storage._value ?? Discovery_V1_Method()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +public struct Discovery_V1_NamedParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedParameter" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Parameter { + get {return _storage._value ?? Discovery_V1_Parameter()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Resource as ordered (name,value) pairs. +public struct Discovery_V1_NamedResource: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResource" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Resource { + get {return _storage._value ?? Discovery_V1_Resource()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +public struct Discovery_V1_NamedSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSchema" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Schema { + get {return _storage._value ?? Discovery_V1_Schema()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Scope as ordered (name,value) pairs. +public struct Discovery_V1_NamedScope: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedScope" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Scope { + get {return _storage._value ?? Discovery_V1_Scope()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Oauth2: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2" + + public var scopes: Discovery_V1_Scopes { + get {return _storage._scopes ?? Discovery_V1_Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._scopes) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: String { + get {return _storage._default} + set {_uniqueStorage()._default = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var minimum: String { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var maximum: String { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var `enum`: [String] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var enumDescriptions: [String] { + get {return _storage._enumDescriptions} + set {_uniqueStorage()._enumDescriptions = newValue} + } + + public var repeated: Bool { + get {return _storage._repeated} + set {_uniqueStorage()._repeated = newValue} + } + + public var location: String { + get {return _storage._location} + set {_uniqueStorage()._location = newValue} + } + + public var properties: Discovery_V1_Schemas { + get {return _storage._properties ?? Discovery_V1_Schemas()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var additionalProperties: Discovery_V1_Schema { + get {return _storage._additionalProperties ?? Discovery_V1_Schema()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. + public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var items: Discovery_V1_Schema { + get {return _storage._items ?? Discovery_V1_Schema()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. 
+ public mutating func clearItems() {_storage._items = nil} + + public var annotations: Discovery_V1_Annotations { + get {return _storage._annotations ?? Discovery_V1_Annotations()} + set {_uniqueStorage()._annotations = newValue} + } + /// Returns true if `annotations` has been explicitly set. + public var hasAnnotations: Bool {return _storage._annotations != nil} + /// Clears the value of `annotations`. Subsequent reads from it will return its default value. + public mutating func clearAnnotations() {_storage._annotations = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._id) + case 2: try decoder.decodeSingularStringField(value: &_storage._type) + case 3: try decoder.decodeSingularStringField(value: &_storage._ref) + case 4: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 5: try decoder.decodeSingularStringField(value: &_storage._default) + case 6: try decoder.decodeSingularBoolField(value: &_storage._required) + case 7: try decoder.decodeSingularStringField(value: &_storage._format) + case 8: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 9: try decoder.decodeSingularStringField(value: &_storage._minimum) + case 10: try decoder.decodeSingularStringField(value: &_storage._maximum) + case 11: try decoder.decodeRepeatedStringField(value: &_storage._enum) + case 12: try decoder.decodeRepeatedStringField(value: &_storage._enumDescriptions) + case 13: try decoder.decodeSingularBoolField(value: &_storage._repeated) + case 14: try decoder.decodeSingularStringField(value: &_storage._location) + case 15: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 16: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 17: try decoder.decodeSingularMessageField(value: &_storage._items) + case 18: try decoder.decodeSingularMessageField(value: &_storage._annotations) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 1) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 2) + } + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 3) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 4) + } + if !_storage._default.isEmpty { + try visitor.visitSingularStringField(value: _storage._default, fieldNumber: 5) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 6) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 7) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 8) + } + if !_storage._minimum.isEmpty { + try visitor.visitSingularStringField(value: _storage._minimum, fieldNumber: 9) + } + if !_storage._maximum.isEmpty { + try visitor.visitSingularStringField(value: _storage._maximum, fieldNumber: 10) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enum, fieldNumber: 11) + } + if !_storage._enumDescriptions.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enumDescriptions, fieldNumber: 12) + } + if _storage._repeated != false { + try visitor.visitSingularBoolField(value: _storage._repeated, fieldNumber: 13) + } + if !_storage._location.isEmpty { + try visitor.visitSingularStringField(value: _storage._location, fieldNumber: 14) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 15) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 16) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 17) + } + if let v = _storage._annotations { + try visitor.visitSingularMessageField(value: v, fieldNumber: 18) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Parameters: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameters" + + public var additionalProperties: [Discovery_V1_NamedParameter] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Discovery_V1_Protocols: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Protocols"
+
+  public var simple: Discovery_V1_Simple {
+    get {return _storage._simple ?? Discovery_V1_Simple()}
+    set {_uniqueStorage()._simple = newValue}
+  }
+  /// Returns true if `simple` has been explicitly set.
+  public var hasSimple: Bool {return _storage._simple != nil}
+  /// Clears the value of `simple`. Subsequent reads from it will return its default value.
+  public mutating func clearSimple() {_storage._simple = nil}
+
+  public var resumable: Discovery_V1_Resumable {
+    get {return _storage._resumable ?? Discovery_V1_Resumable()}
+    set {_uniqueStorage()._resumable = newValue}
+  }
+  /// Returns true if `resumable` has been explicitly set.
+  public var hasResumable: Bool {return _storage._resumable != nil}
+  /// Clears the value of `resumable`. Subsequent reads from it will return its default value.
+  public mutating func clearResumable() {_storage._resumable = nil}
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularMessageField(value: &_storage._simple)
+        case 2: try decoder.decodeSingularMessageField(value: &_storage._resumable)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      if let v = _storage._simple {
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+      }
+      if let v = _storage._resumable {
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
+      }
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  fileprivate var _storage = _StorageClass.defaultInstance
+}
+
+public struct Discovery_V1_Request: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Request"
+
+  public var ref: String = String()
+
+  public var parameterName: String = String()
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + case 2: try decoder.decodeSingularStringField(value: &self.parameterName) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + if !self.parameterName.isEmpty { + try visitor.visitSingularStringField(value: self.parameterName, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Resource: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Resource" + + public var methods: Discovery_V1_Methods { + get {return _storage._methods ?? Discovery_V1_Methods()} + set {_uniqueStorage()._methods = newValue} + } + /// Returns true if `methods` has been explicitly set. + public var hasMethods: Bool {return _storage._methods != nil} + /// Clears the value of `methods`. Subsequent reads from it will return its default value. + public mutating func clearMethods() {_storage._methods = nil} + + public var resources: Discovery_V1_Resources { + get {return _storage._resources ?? Discovery_V1_Resources()} + set {_uniqueStorage()._resources = newValue} + } + /// Returns true if `resources` has been explicitly set. + public var hasResources: Bool {return _storage._resources != nil} + /// Clears the value of `resources`. Subsequent reads from it will return its default value. + public mutating func clearResources() {_storage._resources = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._methods) + case 2: try decoder.decodeSingularMessageField(value: &_storage._resources) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._methods { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._resources { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Resources: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Resources" + + public var additionalProperties: [Discovery_V1_NamedResource] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + public var ref: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Resumable: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Resumable" + + public var multipart: Bool = false + + public var path: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &self.multipart) + case 2: try decoder.decodeSingularStringField(value: &self.path) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if self.multipart != false { + try visitor.visitSingularBoolField(value: self.multipart, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitSingularStringField(value: self.path, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Schema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schema" + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: String { + get {return _storage._default} + set {_uniqueStorage()._default = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var minimum: String { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var maximum: String { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var `enum`: [String] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var enumDescriptions: [String] { + get {return _storage._enumDescriptions} + set {_uniqueStorage()._enumDescriptions = newValue} + } + + public var repeated: Bool { + get {return _storage._repeated} + set {_uniqueStorage()._repeated = newValue} + } + + public var location: String { + get {return _storage._location} + set {_uniqueStorage()._location = newValue} + } + + public var properties: Discovery_V1_Schemas { + get {return _storage._properties ?? Discovery_V1_Schemas()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var additionalProperties: Discovery_V1_Schema { + get {return _storage._additionalProperties ?? Discovery_V1_Schema()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. 
+ public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var items: Discovery_V1_Schema { + get {return _storage._items ?? Discovery_V1_Schema()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var annotations: Discovery_V1_Annotations { + get {return _storage._annotations ?? Discovery_V1_Annotations()} + set {_uniqueStorage()._annotations = newValue} + } + /// Returns true if `annotations` has been explicitly set. + public var hasAnnotations: Bool {return _storage._annotations != nil} + /// Clears the value of `annotations`. Subsequent reads from it will return its default value. + public mutating func clearAnnotations() {_storage._annotations = nil} + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._id) + case 2: try decoder.decodeSingularStringField(value: &_storage._type) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._default) + case 5: try decoder.decodeSingularBoolField(value: &_storage._required) + case 6: try decoder.decodeSingularStringField(value: &_storage._format) + case 7: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 8: try decoder.decodeSingularStringField(value: &_storage._minimum) + case 9: try decoder.decodeSingularStringField(value: &_storage._maximum) + case 10: try decoder.decodeRepeatedStringField(value: &_storage._enum) + case 11: try decoder.decodeRepeatedStringField(value: &_storage._enumDescriptions) + case 12: try decoder.decodeSingularBoolField(value: &_storage._repeated) + case 13: try decoder.decodeSingularStringField(value: &_storage._location) + case 14: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 15: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 16: try decoder.decodeSingularMessageField(value: &_storage._items) + case 17: try decoder.decodeSingularStringField(value: &_storage._ref) + case 18: try decoder.decodeSingularMessageField(value: &_storage._annotations) + case 19: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf 
library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 1) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._default.isEmpty { + try visitor.visitSingularStringField(value: _storage._default, fieldNumber: 4) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 5) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 6) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 7) + } + if !_storage._minimum.isEmpty { + try visitor.visitSingularStringField(value: _storage._minimum, fieldNumber: 8) + } + if !_storage._maximum.isEmpty { + try visitor.visitSingularStringField(value: _storage._maximum, fieldNumber: 9) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enum, fieldNumber: 10) + } + if !_storage._enumDescriptions.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enumDescriptions, fieldNumber: 11) + } + if _storage._repeated != false { + try visitor.visitSingularBoolField(value: _storage._repeated, fieldNumber: 12) + } + if !_storage._location.isEmpty { + try visitor.visitSingularStringField(value: _storage._location, fieldNumber: 13) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 14) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 15) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 16) + } + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 17) + } + if let v = _storage._annotations { + try visitor.visitSingularMessageField(value: v, fieldNumber: 18) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 19) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Schemas: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schemas" + + public var additionalProperties: [Discovery_V1_NamedSchema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Discovery_V1_Scope: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Scope"
+
+  public var description_p: String = String()
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeSingularStringField(value: &self.description_p)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.description_p.isEmpty {
+      try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Discovery_V1_Scopes: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Scopes"
+
+  public var additionalProperties: [Discovery_V1_NamedScope] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Discovery_V1_Simple: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Simple"
+
+  public var multipart: Bool = false
+
+  public var path: String = String()
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeSingularBoolField(value: &self.multipart)
+      case 2: try decoder.decodeSingularStringField(value: &self.path)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if self.multipart != false {
+      try visitor.visitSingularBoolField(value: self.multipart, fieldNumber: 1)
+    }
+    if !self.path.isEmpty {
+      try visitor.visitSingularStringField(value: self.path, fieldNumber: 2)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Discovery_V1_StringArray: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".StringArray"
+
+  public var value: [String] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedStringField(value: &self.value)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.value.isEmpty {
+      try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+// MARK: - Code below here is support for the SwiftProtobuf runtime.
+
+fileprivate let _protobuf_package = "discovery.v1"
+
+extension Discovery_V1_Annotations: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "required"),
+  ]
+
+  public func _protobuf_generated_isEqualTo(other: Discovery_V1_Annotations) -> Bool {
+    if self.required != other.required {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Discovery_V1_Any: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "value"),
+    2: .same(proto: "yaml"),
+  ]
+
+  fileprivate class _StorageClass {
+    var _value: SwiftProtobuf.Google_Protobuf_Any? = nil
+    var _yaml: String = String()
+
+    static let defaultInstance = _StorageClass()
+
+    private init() {}
+
+    init(copying source: _StorageClass) {
+      _value = source._value
+      _yaml = source._yaml
+    }
+  }
+
+  fileprivate mutating func _uniqueStorage() -> _StorageClass {
+    if !isKnownUniquelyReferenced(&_storage) {
+      _storage = _StorageClass(copying: _storage)
+    }
+    return _storage
+  }
+
+  public func _protobuf_generated_isEqualTo(other: Discovery_V1_Any) -> Bool {
+    if _storage !== other._storage {
+      let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in
+        if _storage._value != other_storage._value {return false}
+        if _storage._yaml != other_storage._yaml {return false}
+        return true
+      }
+      if !storagesAreEqual {return false}
+    }
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Discovery_V1_Auth: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "oauth2"),
+  ]
+
+  fileprivate class _StorageClass {
+    var _oauth2: Discovery_V1_Oauth2?
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oauth2 = source._oauth2 + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Auth) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oauth2 != other_storage._oauth2 {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Document: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "kind"), + 2: .standard(proto: "discovery_version"), + 3: .same(proto: "id"), + 4: .same(proto: "name"), + 5: .same(proto: "version"), + 6: .same(proto: "revision"), + 7: .same(proto: "title"), + 8: .same(proto: "description"), + 9: .same(proto: "icons"), + 10: .standard(proto: "documentation_link"), + 11: .same(proto: "labels"), + 12: .same(proto: "protocol"), + 13: .standard(proto: "base_url"), + 14: .standard(proto: "base_path"), + 15: .standard(proto: "root_url"), + 16: .standard(proto: "service_path"), + 17: .standard(proto: "batch_path"), + 18: .same(proto: "parameters"), + 19: .same(proto: "auth"), + 20: .same(proto: "features"), + 21: .same(proto: "schemas"), + 22: .same(proto: "methods"), + 23: .same(proto: "resources"), + 24: .same(proto: "etag"), + 25: .standard(proto: "owner_domain"), + 26: .standard(proto: "owner_name"), + 27: .standard(proto: "version_module"), + 28: .standard(proto: "canonical_name"), + 29: .standard(proto: "fully_encode_reserved_expansion"), + 30: .standard(proto: "package_path"), + ] + + fileprivate class _StorageClass { + var _kind: String = String() + var _discoveryVersion: String = String() + var _id: String = String() + var _name: String = String() + var _version: String = String() + var _revision: String = String() + var _title: String = String() + var _description_p: String = String() + var _icons: Discovery_V1_Icons? = nil + var _documentationLink: String = String() + var _labels: [String] = [] + var _protocol: String = String() + var _baseURL: String = String() + var _basePath: String = String() + var _rootURL: String = String() + var _servicePath: String = String() + var _batchPath: String = String() + var _parameters: Discovery_V1_Parameters? = nil + var _auth: Discovery_V1_Auth? = nil + var _features: [String] = [] + var _schemas: Discovery_V1_Schemas? = nil + var _methods: Discovery_V1_Methods? = nil + var _resources: Discovery_V1_Resources? 
= nil + var _etag: String = String() + var _ownerDomain: String = String() + var _ownerName: String = String() + var _versionModule: Bool = false + var _canonicalName: String = String() + var _fullyEncodeReservedExpansion: Bool = false + var _packagePath: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _kind = source._kind + _discoveryVersion = source._discoveryVersion + _id = source._id + _name = source._name + _version = source._version + _revision = source._revision + _title = source._title + _description_p = source._description_p + _icons = source._icons + _documentationLink = source._documentationLink + _labels = source._labels + _protocol = source._protocol + _baseURL = source._baseURL + _basePath = source._basePath + _rootURL = source._rootURL + _servicePath = source._servicePath + _batchPath = source._batchPath + _parameters = source._parameters + _auth = source._auth + _features = source._features + _schemas = source._schemas + _methods = source._methods + _resources = source._resources + _etag = source._etag + _ownerDomain = source._ownerDomain + _ownerName = source._ownerName + _versionModule = source._versionModule + _canonicalName = source._canonicalName + _fullyEncodeReservedExpansion = source._fullyEncodeReservedExpansion + _packagePath = source._packagePath + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Document) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._kind != other_storage._kind {return false} + if _storage._discoveryVersion != other_storage._discoveryVersion {return false} + if _storage._id != other_storage._id {return false} + if _storage._name != other_storage._name {return false} + if _storage._version != other_storage._version {return false} + if _storage._revision != other_storage._revision {return false} + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._icons != other_storage._icons {return false} + if _storage._documentationLink != other_storage._documentationLink {return false} + if _storage._labels != other_storage._labels {return false} + if _storage._protocol != other_storage._protocol {return false} + if _storage._baseURL != other_storage._baseURL {return false} + if _storage._basePath != other_storage._basePath {return false} + if _storage._rootURL != other_storage._rootURL {return false} + if _storage._servicePath != other_storage._servicePath {return false} + if _storage._batchPath != other_storage._batchPath {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._auth != other_storage._auth {return false} + if _storage._features != other_storage._features {return false} + if _storage._schemas != other_storage._schemas {return false} + if _storage._methods != other_storage._methods {return false} + if _storage._resources != other_storage._resources {return false} + if _storage._etag != other_storage._etag {return false} + if _storage._ownerDomain != other_storage._ownerDomain {return false} + if _storage._ownerName != other_storage._ownerName {return false} + if _storage._versionModule != 
other_storage._versionModule {return false} + if _storage._canonicalName != other_storage._canonicalName {return false} + if _storage._fullyEncodeReservedExpansion != other_storage._fullyEncodeReservedExpansion {return false} + if _storage._packagePath != other_storage._packagePath {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Icons: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "x16"), + 2: .same(proto: "x32"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Icons) -> Bool { + if self.x16 != other.x16 {return false} + if self.x32 != other.x32 {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_MediaUpload: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "accept"), + 2: .standard(proto: "max_size"), + 3: .same(proto: "protocols"), + 4: .standard(proto: "supports_subscription"), + ] + + fileprivate class _StorageClass { + var _accept: [String] = [] + var _maxSize: String = String() + var _protocols: Discovery_V1_Protocols? = nil + var _supportsSubscription: Bool = false + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _accept = source._accept + _maxSize = source._maxSize + _protocols = source._protocols + _supportsSubscription = source._supportsSubscription + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_MediaUpload) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._accept != other_storage._accept {return false} + if _storage._maxSize != other_storage._maxSize {return false} + if _storage._protocols != other_storage._protocols {return false} + if _storage._supportsSubscription != other_storage._supportsSubscription {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Method: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "id"), + 2: .same(proto: "path"), + 3: .standard(proto: "http_method"), + 4: .same(proto: "description"), + 5: .same(proto: "parameters"), + 6: .standard(proto: "parameter_order"), + 7: .same(proto: "request"), + 8: .same(proto: "response"), + 9: .same(proto: "scopes"), + 10: .standard(proto: "supports_media_download"), + 11: .standard(proto: "supports_media_upload"), + 12: .standard(proto: "use_media_download_service"), + 13: .standard(proto: "media_upload"), + 14: .standard(proto: "supports_subscription"), + 15: .standard(proto: "flat_path"), + 16: .standard(proto: "etag_required"), + ] + + fileprivate class _StorageClass { + var _id: String = String() + var _path: String = String() + var _httpMethod: String = String() + var _description_p: String = String() + var _parameters: Discovery_V1_Parameters? 
= nil + var _parameterOrder: [String] = [] + var _request: Discovery_V1_Request? = nil + var _response: Discovery_V1_Response? = nil + var _scopes: [String] = [] + var _supportsMediaDownload: Bool = false + var _supportsMediaUpload: Bool = false + var _useMediaDownloadService: Bool = false + var _mediaUpload: Discovery_V1_MediaUpload? = nil + var _supportsSubscription: Bool = false + var _flatPath: String = String() + var _etagRequired: Bool = false + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _id = source._id + _path = source._path + _httpMethod = source._httpMethod + _description_p = source._description_p + _parameters = source._parameters + _parameterOrder = source._parameterOrder + _request = source._request + _response = source._response + _scopes = source._scopes + _supportsMediaDownload = source._supportsMediaDownload + _supportsMediaUpload = source._supportsMediaUpload + _useMediaDownloadService = source._useMediaDownloadService + _mediaUpload = source._mediaUpload + _supportsSubscription = source._supportsSubscription + _flatPath = source._flatPath + _etagRequired = source._etagRequired + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Method) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._id != other_storage._id {return false} + if _storage._path != other_storage._path {return false} + if _storage._httpMethod != other_storage._httpMethod {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._parameterOrder != other_storage._parameterOrder {return false} + if _storage._request != other_storage._request {return false} + if _storage._response != other_storage._response {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._supportsMediaDownload != other_storage._supportsMediaDownload {return false} + if _storage._supportsMediaUpload != other_storage._supportsMediaUpload {return false} + if _storage._useMediaDownloadService != other_storage._useMediaDownloadService {return false} + if _storage._mediaUpload != other_storage._mediaUpload {return false} + if _storage._supportsSubscription != other_storage._supportsSubscription {return false} + if _storage._flatPath != other_storage._flatPath {return false} + if _storage._etagRequired != other_storage._etagRequired {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Methods: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Methods) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedMethod: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: 
SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Method? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedMethod) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Parameter? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedResource: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Resource? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedResource) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Schema? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedScope: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Scope? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedScope) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Oauth2: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "scopes"), + ] + + fileprivate class _StorageClass { + var _scopes: Discovery_V1_Scopes? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _scopes = source._scopes + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Oauth2) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._scopes != other_storage._scopes {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "id"), + 2: .same(proto: "type"), + 3: .standard(proto: "_ref"), + 4: .same(proto: "description"), + 5: .same(proto: "default"), + 6: .same(proto: "required"), + 7: .same(proto: "format"), + 8: .same(proto: "pattern"), + 9: .same(proto: "minimum"), + 10: .same(proto: "maximum"), + 11: .same(proto: "enum"), + 12: .standard(proto: "enum_descriptions"), + 13: .same(proto: "repeated"), + 14: .same(proto: "location"), + 15: .same(proto: "properties"), + 16: .standard(proto: "additional_properties"), + 17: .same(proto: "items"), + 18: .same(proto: "annotations"), + ] + + fileprivate class _StorageClass { + var _id: String = String() + var _type: String = String() + var _ref: String = String() + var _description_p: String = String() + var _default: String = String() + var _required: Bool = false + var _format: String = String() + var _pattern: String = String() + var _minimum: String = String() + var _maximum: String = String() + var _enum: [String] = [] + var _enumDescriptions: [String] = [] + var _repeated: Bool = false + var _location: String = String() + var _properties: Discovery_V1_Schemas? = nil + var _additionalProperties: Discovery_V1_Schema? = nil + var _items: Discovery_V1_Schema? = nil + var _annotations: Discovery_V1_Annotations? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _id = source._id + _type = source._type + _ref = source._ref + _description_p = source._description_p + _default = source._default + _required = source._required + _format = source._format + _pattern = source._pattern + _minimum = source._minimum + _maximum = source._maximum + _enum = source._enum + _enumDescriptions = source._enumDescriptions + _repeated = source._repeated + _location = source._location + _properties = source._properties + _additionalProperties = source._additionalProperties + _items = source._items + _annotations = source._annotations + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Parameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._id != other_storage._id {return false} + if _storage._type != other_storage._type {return false} + if _storage._ref != other_storage._ref {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._required != other_storage._required {return false} + if _storage._format != other_storage._format {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._enumDescriptions != other_storage._enumDescriptions {return false} + if _storage._repeated != other_storage._repeated {return false} + if _storage._location != other_storage._location {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._items != other_storage._items {return false} + if _storage._annotations != other_storage._annotations {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Parameters: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Parameters) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Protocols: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "simple"), + 2: .same(proto: "resumable"), + ] + + fileprivate class _StorageClass { + var _simple: Discovery_V1_Simple? = nil + var _resumable: Discovery_V1_Resumable? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _simple = source._simple + _resumable = source._resumable + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Protocols) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._simple != other_storage._simple {return false} + if _storage._resumable != other_storage._resumable {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Request: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .standard(proto: "parameter_name"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Request) -> Bool { + if self.ref != other.ref {return false} + if self.parameterName != other.parameterName {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Resource: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "methods"), + 2: .same(proto: "resources"), + ] + + fileprivate class _StorageClass { + var _methods: Discovery_V1_Methods? = nil + var _resources: Discovery_V1_Resources? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _methods = source._methods + _resources = source._resources + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Resource) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._methods != other_storage._methods {return false} + if _storage._resources != other_storage._resources {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Resources: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Resources) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Response) -> Bool { + if self.ref != other.ref {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension 
Discovery_V1_Resumable: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "multipart"), + 2: .same(proto: "path"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Resumable) -> Bool { + if self.multipart != other.multipart {return false} + if self.path != other.path {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Schema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "id"), + 2: .same(proto: "type"), + 3: .same(proto: "description"), + 4: .same(proto: "default"), + 5: .same(proto: "required"), + 6: .same(proto: "format"), + 7: .same(proto: "pattern"), + 8: .same(proto: "minimum"), + 9: .same(proto: "maximum"), + 10: .same(proto: "enum"), + 11: .standard(proto: "enum_descriptions"), + 12: .same(proto: "repeated"), + 13: .same(proto: "location"), + 14: .same(proto: "properties"), + 15: .standard(proto: "additional_properties"), + 16: .same(proto: "items"), + 17: .standard(proto: "_ref"), + 18: .same(proto: "annotations"), + 19: .standard(proto: "read_only"), + ] + + fileprivate class _StorageClass { + var _id: String = String() + var _type: String = String() + var _description_p: String = String() + var _default: String = String() + var _required: Bool = false + var _format: String = String() + var _pattern: String = String() + var _minimum: String = String() + var _maximum: String = String() + var _enum: [String] = [] + var _enumDescriptions: [String] = [] + var _repeated: Bool = false + var _location: String = String() + var _properties: Discovery_V1_Schemas? = nil + var _additionalProperties: Discovery_V1_Schema? = nil + var _items: Discovery_V1_Schema? = nil + var _ref: String = String() + var _annotations: Discovery_V1_Annotations? 
= nil + var _readOnly: Bool = false + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _id = source._id + _type = source._type + _description_p = source._description_p + _default = source._default + _required = source._required + _format = source._format + _pattern = source._pattern + _minimum = source._minimum + _maximum = source._maximum + _enum = source._enum + _enumDescriptions = source._enumDescriptions + _repeated = source._repeated + _location = source._location + _properties = source._properties + _additionalProperties = source._additionalProperties + _items = source._items + _ref = source._ref + _annotations = source._annotations + _readOnly = source._readOnly + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Schema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._id != other_storage._id {return false} + if _storage._type != other_storage._type {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._required != other_storage._required {return false} + if _storage._format != other_storage._format {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._enumDescriptions != other_storage._enumDescriptions {return false} + if _storage._repeated != other_storage._repeated {return false} + if _storage._location != other_storage._location {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._items != other_storage._items {return false} + if _storage._ref != other_storage._ref {return false} + if _storage._annotations != other_storage._annotations {return false} + if _storage._readOnly != other_storage._readOnly {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Schemas: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Schemas) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Scope: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Scope) -> Bool { + if self.description_p != other.description_p {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Scopes: SwiftProtobuf._MessageImplementationBase, 
SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Scopes) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Simple: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "multipart"), + 2: .same(proto: "path"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Simple) -> Bool { + if self.multipart != other.multipart {return false} + if self.path != other.path {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_StringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_StringArray) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/plugin.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/plugin.pb.swift new file mode 100644 index 000000000..9e19a082f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/plugin.pb.swift @@ -0,0 +1,499 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/plugins/plugin.proto +// +// For information on using the generated types, please see the documentation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gnostic can be extended with plugins. +// A plugin is just a program that reads a Request from stdin +// and writes a Response to stdout. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "gnostic_$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to gnostic. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that you are building against the same version of the API +// that was used to generate this file.
+fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +/// The version number of gnostic. +public struct Gnostic_Plugin_V1_Version: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Version" + + public var major: Int32 = 0 + + public var minor: Int32 = 0 + + public var patch: Int32 = 0 + + /// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + /// be empty for mainline stable releases. + public var suffix: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularInt32Field(value: &self.major) + case 2: try decoder.decodeSingularInt32Field(value: &self.minor) + case 3: try decoder.decodeSingularInt32Field(value: &self.patch) + case 4: try decoder.decodeSingularStringField(value: &self.suffix) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if self.major != 0 { + try visitor.visitSingularInt32Field(value: self.major, fieldNumber: 1) + } + if self.minor != 0 { + try visitor.visitSingularInt32Field(value: self.minor, fieldNumber: 2) + } + if self.patch != 0 { + try visitor.visitSingularInt32Field(value: self.patch, fieldNumber: 3) + } + if !self.suffix.isEmpty { + try visitor.visitSingularStringField(value: self.suffix, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A parameter passed to the plugin from (or through) gnostic. +public struct Gnostic_Plugin_V1_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + /// The name of the parameter as specified in the option string + public var name: String = String() + + /// The parameter value as specified in the option string + public var value: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.value.isEmpty { + try visitor.visitSingularStringField(value: self.value, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// An encoded Request is written to the plugin's stdin. +public struct Gnostic_Plugin_V1_Request: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Request" + + /// filename or URL of the original source document + public var sourceName: String { + get {return _storage._sourceName} + set {_uniqueStorage()._sourceName = newValue} + } + + /// Output path specified in the plugin invocation. + public var outputPath: String { + get {return _storage._outputPath} + set {_uniqueStorage()._outputPath = newValue} + } + + /// Plugin parameters parsed from the invocation string. + public var parameters: [Gnostic_Plugin_V1_Parameter] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + /// The version number of gnostic. + public var compilerVersion: Gnostic_Plugin_V1_Version { + get {return _storage._compilerVersion ?? Gnostic_Plugin_V1_Version()} + set {_uniqueStorage()._compilerVersion = newValue} + } + /// Returns true if `compilerVersion` has been explicitly set. + public var hasCompilerVersion: Bool {return _storage._compilerVersion != nil} + /// Clears the value of `compilerVersion`. Subsequent reads from it will return its default value. + public mutating func clearCompilerVersion() {_storage._compilerVersion = nil} + + /// OpenAPI v2 API representation + public var openapi2: Openapi_V2_Document { + get {return _storage._openapi2 ?? Openapi_V2_Document()} + set {_uniqueStorage()._openapi2 = newValue} + } + /// Returns true if `openapi2` has been explicitly set. + public var hasOpenapi2: Bool {return _storage._openapi2 != nil} + /// Clears the value of `openapi2`. Subsequent reads from it will return its default value. + public mutating func clearOpenapi2() {_storage._openapi2 = nil} + + /// OpenAPI v3 API representation + public var openapi3: Openapi_V3_Document { + get {return _storage._openapi3 ?? Openapi_V3_Document()} + set {_uniqueStorage()._openapi3 = newValue} + } + /// Returns true if `openapi3` has been explicitly set. + public var hasOpenapi3: Bool {return _storage._openapi3 != nil} + /// Clears the value of `openapi3`. Subsequent reads from it will return its default value. + public mutating func clearOpenapi3() {_storage._openapi3 = nil} + + /// Discovery API representation + public var discovery: Discovery_V1_Document { + get {return _storage._discovery ?? Discovery_V1_Document()} + set {_uniqueStorage()._discovery = newValue} + } + /// Returns true if `discovery` has been explicitly set. + public var hasDiscovery: Bool {return _storage._discovery != nil} + /// Clears the value of `discovery`. Subsequent reads from it will return its default value. + public mutating func clearDiscovery() {_storage._discovery = nil} + + /// generated code surface representation + public var surface: Surface_V1_Model { + get {return _storage._surface ?? Surface_V1_Model()} + set {_uniqueStorage()._surface = newValue} + } + /// Returns true if `surface` has been explicitly set. 
+ public var hasSurface: Bool {return _storage._surface != nil} + /// Clears the value of `surface`. Subsequent reads from it will return its default value. + public mutating func clearSurface() {_storage._surface = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._sourceName) + case 2: try decoder.decodeSingularStringField(value: &_storage._outputPath) + case 3: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 4: try decoder.decodeSingularMessageField(value: &_storage._compilerVersion) + case 5: try decoder.decodeSingularMessageField(value: &_storage._openapi2) + case 6: try decoder.decodeSingularMessageField(value: &_storage._openapi3) + case 7: try decoder.decodeSingularMessageField(value: &_storage._discovery) + case 8: try decoder.decodeSingularMessageField(value: &_storage._surface) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._sourceName.isEmpty { + try visitor.visitSingularStringField(value: _storage._sourceName, fieldNumber: 1) + } + if !_storage._outputPath.isEmpty { + try visitor.visitSingularStringField(value: _storage._outputPath, fieldNumber: 2) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 3) + } + if let v = _storage._compilerVersion { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._openapi2 { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._openapi3 { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._discovery { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._surface { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// The plugin writes an encoded Response to stdout. +public struct Gnostic_Plugin_V1_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + /// Error message. If non-empty, the plugin failed. + /// The plugin process should exit with status code zero + /// even if it reports an error in this way. + /// + /// This should be used to indicate errors which prevent the plugin from + /// operating as intended. 
Errors which indicate a problem in openapic + /// itself -- such as the input Document being unparseable -- should be + /// reported by writing a message to stderr and exiting with a non-zero + /// status code. + public var errors: [String] = [] + + /// file output, each file will be written by openapic to an appropriate location. + public var files: [Gnostic_Plugin_V1_File] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.errors) + case 2: try decoder.decodeRepeatedMessageField(value: &self.files) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.errors.isEmpty { + try visitor.visitRepeatedStringField(value: self.errors, fieldNumber: 1) + } + if !self.files.isEmpty { + try visitor.visitRepeatedMessageField(value: self.files, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// File describes a file generated by a plugin. +public struct Gnostic_Plugin_V1_File: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".File" + + /// name of the file + public var name: String = String() + + /// data to be written to the file + public var data: Data = SwiftProtobuf.Internal.emptyData + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularBytesField(value: &self.data) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.data.isEmpty { + try visitor.visitSingularBytesField(value: self.data, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. 
+ +fileprivate let _protobuf_package = "gnostic.plugin.v1" + +extension Gnostic_Plugin_V1_Version: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "major"), + 2: .same(proto: "minor"), + 3: .same(proto: "patch"), + 4: .same(proto: "suffix"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Version) -> Bool { + if self.major != other.major {return false} + if self.minor != other.minor {return false} + if self.patch != other.patch {return false} + if self.suffix != other.suffix {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Parameter) -> Bool { + if self.name != other.name {return false} + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_Request: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "source_name"), + 2: .standard(proto: "output_path"), + 3: .same(proto: "parameters"), + 4: .standard(proto: "compiler_version"), + 5: .same(proto: "openapi2"), + 6: .same(proto: "openapi3"), + 7: .same(proto: "discovery"), + 8: .same(proto: "surface"), + ] + + fileprivate class _StorageClass { + var _sourceName: String = String() + var _outputPath: String = String() + var _parameters: [Gnostic_Plugin_V1_Parameter] = [] + var _compilerVersion: Gnostic_Plugin_V1_Version? = nil + var _openapi2: Openapi_V2_Document? = nil + var _openapi3: Openapi_V3_Document? = nil + var _discovery: Discovery_V1_Document? = nil + var _surface: Surface_V1_Model? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _sourceName = source._sourceName + _outputPath = source._outputPath + _parameters = source._parameters + _compilerVersion = source._compilerVersion + _openapi2 = source._openapi2 + _openapi3 = source._openapi3 + _discovery = source._discovery + _surface = source._surface + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Request) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._sourceName != other_storage._sourceName {return false} + if _storage._outputPath != other_storage._outputPath {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._compilerVersion != other_storage._compilerVersion {return false} + if _storage._openapi2 != other_storage._openapi2 {return false} + if _storage._openapi3 != other_storage._openapi3 {return false} + if _storage._discovery != other_storage._discovery {return false} + if _storage._surface != other_storage._surface {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "errors"), + 2: .same(proto: "files"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Response) -> Bool { + if self.errors != other.errors {return false} + if self.files != other.files {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_File: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "data"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_File) -> Bool { + if self.name != other.name {return false} + if self.data != other.data {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/surface.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/surface.pb.swift new file mode 100644 index 000000000..f61b8e28c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/Gnostic/surface.pb.swift @@ -0,0 +1,579 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/surface/surface.proto +// +// For information on using the generated types, please see the documentation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Model an API surface for code generation. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that you are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public enum Surface_V1_FieldKind: SwiftProtobuf.Enum { + public typealias RawValue = Int + case scalar // = 0 + case map // = 1 + case array // = 2 + case reference // = 3 + case UNRECOGNIZED(Int) + + public init() { + self = .scalar + } + + public init?(rawValue: Int) { + switch rawValue { + case 0: self = .scalar + case 1: self = .map + case 2: self = .array + case 3: self = .reference + default: self = .UNRECOGNIZED(rawValue) + } + } + + public var rawValue: Int { + switch self { + case .scalar: return 0 + case .map: return 1 + case .array: return 2 + case .reference: return 3 + case .UNRECOGNIZED(let i): return i + } + } + +} + +public enum Surface_V1_TypeKind: SwiftProtobuf.Enum { + public typealias RawValue = Int + + /// implement with named fields + case `struct` // = 0 + + /// implement with a map + case object // = 1 + case UNRECOGNIZED(Int) + + public init() { + self = .struct + } + + public init?(rawValue: Int) { + switch rawValue { + case 0: self = .struct + case 1: self = .object + default: self = .UNRECOGNIZED(rawValue) + } + } + + public var rawValue: Int { + switch self { + case .struct: return 0 + case .object: return 1 + case .UNRECOGNIZED(let i): return i + } + } + +} + +public enum Surface_V1_Position: SwiftProtobuf.Enum { + public typealias RawValue = Int + case body // = 0 + case header // = 1 + case formdata // = 2 + case query // = 3 + case path // = 4 + case UNRECOGNIZED(Int) + + public init() { + self = .body + } + + public init?(rawValue: Int) { + switch rawValue { + case 0: self = .body + case 1: self = .header + case 2: self = .formdata + case 3: self = .query + case 4: self = .path + default: self = .UNRECOGNIZED(rawValue) + } + } + + public var rawValue: Int { + switch self { + case .body: return 0 + case .header: return 1 + case .formdata: return 2 + case .query: return 3 + case .path: return 4 + case .UNRECOGNIZED(let i): return i + } + } + +} + +/// Field is a field in a definition and can be associated with +/// a position in a request structure. +public struct Surface_V1_Field: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Field" + + /// the name as specified in the API description + public var name: String = String() + + /// the specified content type of the field + public var type: String = String() + + /// what kind of thing is this field?
scalar, reference, array, map of strings to the specified type + public var kind: Surface_V1_FieldKind = .scalar + + /// the specified format of the field + public var format: String = String() + + /// "body", "header", "formdata", "query", or "path" + public var position: Surface_V1_Position = .body + + /// the programming-language native type of the field + public var nativeType: String = String() + + /// the name to use for a data structure field + public var fieldName: String = String() + + /// the name to use for a function parameter + public var parameterName: String = String() + + /// true if this field should be serialized (to JSON, etc) + public var serialize: Bool = false + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.type) + case 3: try decoder.decodeSingularEnumField(value: &self.kind) + case 4: try decoder.decodeSingularStringField(value: &self.format) + case 5: try decoder.decodeSingularEnumField(value: &self.position) + case 6: try decoder.decodeSingularStringField(value: &self.nativeType) + case 7: try decoder.decodeSingularStringField(value: &self.fieldName) + case 8: try decoder.decodeSingularStringField(value: &self.parameterName) + case 9: try decoder.decodeSingularBoolField(value: &self.serialize) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.type.isEmpty { + try visitor.visitSingularStringField(value: self.type, fieldNumber: 2) + } + if self.kind != .scalar { + try visitor.visitSingularEnumField(value: self.kind, fieldNumber: 3) + } + if !self.format.isEmpty { + try visitor.visitSingularStringField(value: self.format, fieldNumber: 4) + } + if self.position != .body { + try visitor.visitSingularEnumField(value: self.position, fieldNumber: 5) + } + if !self.nativeType.isEmpty { + try visitor.visitSingularStringField(value: self.nativeType, fieldNumber: 6) + } + if !self.fieldName.isEmpty { + try visitor.visitSingularStringField(value: self.fieldName, fieldNumber: 7) + } + if !self.parameterName.isEmpty { + try visitor.visitSingularStringField(value: self.parameterName, fieldNumber: 8) + } + if self.serialize != false { + try visitor.visitSingularBoolField(value: self.serialize, fieldNumber: 9) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Type typically corresponds to a definition, parameter, or response +/// in an API and is represented by a type in generated code. 
+public struct Surface_V1_Type: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Type" + + /// the name to use for the type + public var name: String = String() + + /// a meta-description of the type (struct, map, etc) + public var kind: Surface_V1_TypeKind = .struct + + /// a comment describing the type + public var description_p: String = String() + + /// if the type is a map, this is its content type + public var contentType: String = String() + + /// the fields of the type + public var fields: [Surface_V1_Field] = [] + + /// language-specific type name + public var typeName: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularEnumField(value: &self.kind) + case 3: try decoder.decodeSingularStringField(value: &self.description_p) + case 4: try decoder.decodeSingularStringField(value: &self.contentType) + case 5: try decoder.decodeRepeatedMessageField(value: &self.fields) + case 6: try decoder.decodeSingularStringField(value: &self.typeName) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if self.kind != .struct { + try visitor.visitSingularEnumField(value: self.kind, fieldNumber: 2) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 3) + } + if !self.contentType.isEmpty { + try visitor.visitSingularStringField(value: self.contentType, fieldNumber: 4) + } + if !self.fields.isEmpty { + try visitor.visitRepeatedMessageField(value: self.fields, fieldNumber: 5) + } + if !self.typeName.isEmpty { + try visitor.visitSingularStringField(value: self.typeName, fieldNumber: 6) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Method is an operation of an API and typically has associated client and server code. 
+public struct Surface_V1_Method: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Method" + + /// Operation ID + public var operation: String = String() + + /// HTTP path + public var path: String = String() + + /// HTTP method name + public var method: String = String() + + /// description of method + public var description_p: String = String() + + /// Operation name, possibly generated from method and path + public var name: String = String() + + /// name of the generated handler + public var handlerName: String = String() + + /// name of the processing function in the service interface + public var processorName: String = String() + + /// name of client + public var clientName: String = String() + + /// parameters (input), with fields corresponding to input parameters + public var parametersTypeName: String = String() + + /// responses (output), with fields corresponding to possible response values + public var responsesTypeName: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.operation) + case 2: try decoder.decodeSingularStringField(value: &self.path) + case 3: try decoder.decodeSingularStringField(value: &self.method) + case 4: try decoder.decodeSingularStringField(value: &self.description_p) + case 5: try decoder.decodeSingularStringField(value: &self.name) + case 6: try decoder.decodeSingularStringField(value: &self.handlerName) + case 7: try decoder.decodeSingularStringField(value: &self.processorName) + case 8: try decoder.decodeSingularStringField(value: &self.clientName) + case 9: try decoder.decodeSingularStringField(value: &self.parametersTypeName) + case 10: try decoder.decodeSingularStringField(value: &self.responsesTypeName) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.operation.isEmpty { + try visitor.visitSingularStringField(value: self.operation, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitSingularStringField(value: self.path, fieldNumber: 2) + } + if !self.method.isEmpty { + try visitor.visitSingularStringField(value: self.method, fieldNumber: 3) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 4) + } + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 5) + } + if !self.handlerName.isEmpty { + try visitor.visitSingularStringField(value: self.handlerName, fieldNumber: 6) + } + if !self.processorName.isEmpty { + try visitor.visitSingularStringField(value: self.processorName, fieldNumber: 7) + } + if !self.clientName.isEmpty { + try visitor.visitSingularStringField(value: self.clientName, fieldNumber: 8) + } + if !self.parametersTypeName.isEmpty { + try visitor.visitSingularStringField(value: self.parametersTypeName, fieldNumber: 9) + } + if !self.responsesTypeName.isEmpty { + try visitor.visitSingularStringField(value: self.responsesTypeName, fieldNumber: 10) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Model represents an API for code generation. +public struct Surface_V1_Model: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Model" + + /// a free-form title for the API + public var name: String = String() + + /// the types used by the API + public var types: [Surface_V1_Type] = [] + + /// the methods (functions) of the API + public var methods: [Surface_V1_Method] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeRepeatedMessageField(value: &self.types) + case 3: try decoder.decodeRepeatedMessageField(value: &self.methods) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.types.isEmpty { + try visitor.visitRepeatedMessageField(value: self.types, fieldNumber: 2) + } + if !self.methods.isEmpty { + try visitor.visitRepeatedMessageField(value: self.methods, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. 
+ +fileprivate let _protobuf_package = "surface.v1" + +extension Surface_V1_FieldKind: SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 0: .same(proto: "SCALAR"), + 1: .same(proto: "MAP"), + 2: .same(proto: "ARRAY"), + 3: .same(proto: "REFERENCE"), + ] +} + +extension Surface_V1_TypeKind: SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 0: .same(proto: "STRUCT"), + 1: .same(proto: "OBJECT"), + ] +} + +extension Surface_V1_Position: SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 0: .same(proto: "BODY"), + 1: .same(proto: "HEADER"), + 2: .same(proto: "FORMDATA"), + 3: .same(proto: "QUERY"), + 4: .same(proto: "PATH"), + ] +} + +extension Surface_V1_Field: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "type"), + 3: .same(proto: "kind"), + 4: .same(proto: "format"), + 5: .same(proto: "position"), + 6: .same(proto: "nativeType"), + 7: .same(proto: "fieldName"), + 8: .same(proto: "parameterName"), + 9: .same(proto: "serialize"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Field) -> Bool { + if self.name != other.name {return false} + if self.type != other.type {return false} + if self.kind != other.kind {return false} + if self.format != other.format {return false} + if self.position != other.position {return false} + if self.nativeType != other.nativeType {return false} + if self.fieldName != other.fieldName {return false} + if self.parameterName != other.parameterName {return false} + if self.serialize != other.serialize {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Surface_V1_Type: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "kind"), + 3: .same(proto: "description"), + 4: .same(proto: "contentType"), + 5: .same(proto: "fields"), + 6: .same(proto: "typeName"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Type) -> Bool { + if self.name != other.name {return false} + if self.kind != other.kind {return false} + if self.description_p != other.description_p {return false} + if self.contentType != other.contentType {return false} + if self.fields != other.fields {return false} + if self.typeName != other.typeName {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Surface_V1_Method: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "operation"), + 2: .same(proto: "path"), + 3: .same(proto: "method"), + 4: .same(proto: "description"), + 5: .same(proto: "name"), + 6: .same(proto: "handlerName"), + 7: .same(proto: "processorName"), + 8: .same(proto: "clientName"), + 9: .same(proto: "parametersTypeName"), + 10: .same(proto: "responsesTypeName"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Method) -> Bool { + if self.operation != other.operation {return false} + if self.path != other.path {return false} + if self.method != other.method {return false} + if self.description_p != other.description_p {return false} + if self.name != other.name {return false} + if 
self.handlerName != other.handlerName {return false} + if self.processorName != other.processorName {return false} + if self.clientName != other.clientName {return false} + if self.parametersTypeName != other.parametersTypeName {return false} + if self.responsesTypeName != other.responsesTypeName {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Surface_V1_Model: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "types"), + 3: .same(proto: "methods"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Model) -> Bool { + if self.name != other.name {return false} + if self.types != other.types {return false} + if self.methods != other.methods {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderClient.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderClient.swift new file mode 100644 index 000000000..4ba9cea79 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderClient.swift @@ -0,0 +1,184 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import Foundation +import Gnostic + +extension ServiceRenderer { + + func renderClient() -> String { + var code = CodePrinter() + code.print(header) + code.print() + code.print("// Client code") + code.print() + code.print("import Foundation") + code.print("import Dispatch") + code.print() + code.print(""" +enum ClientError: Swift.Error { + case errorWithCode(Int) +} +""") + code.print() + code.print("public class Client {") + code.indent() + code.print("var service : String") + code.print() + code.print(""" +public init(service: String) { + self.service = service +} +""") + for serviceMethod in self.methods { + code.print() + code.print("// " + serviceMethod.description + " Asynchronous.") + code.print("public func " + serviceMethod.name + "(" + asyncClientParametersDeclaration(serviceMethod) + ") throws {") + code.indent() + code.print("var path = self.service") + code.print("path = path + \"" + serviceMethod.path + "\"") + for serviceTypeField in parametersTypeFields(serviceMethod) { + if serviceTypeField.position == "path" { + code.print("path = path.replacingOccurrences(of:\"{" + + serviceTypeField.name + + "}\", with:\"\\(" + + serviceTypeField.name + + ")\")") + } + } + code.print("guard let url = URL(string:path) else {") + code.indent() + code.print("throw ClientError.errorWithCode(0)") + code.outdent() + code.print("}") + code.print("var request = URLRequest(url:url)") + code.print("request.httpMethod = \"" + serviceMethod.method + "\"") + for serviceTypeField in parametersTypeFields(serviceMethod) { + if serviceTypeField.position == "body" { + code.print("let jsonObject = " + serviceTypeField.name + ".jsonObject()") + code.print("request.httpBody = try JSONSerialization.data(withJSONObject:jsonObject)") + } + } + if hasResponses(serviceMethod) { + code.print("fetch(request) {(data, response, error) in") + code.indent() + code.print("if error != nil {") + code.indent() + code.print("callback(nil, ClientError.errorWithCode(0))") + code.print("return") + code.outdent() + code.print("}") + code.print("guard let httpResponse = response else {") + code.indent() + code.print("callback(nil, ClientError.errorWithCode(0))") + code.print("return") + code.outdent() + code.print("}") + code.print("if httpResponse.statusCode == 200 {") + code.indent() + code.print("if let data = data {") + code.indent() + code.print("let jsonObject = try! JSONSerialization.jsonObject(with:data)") + code.print("if let value = " + serviceMethod.resultTypeName! 
+ "(jsonObject:jsonObject) {") + code.indent() + code.print("callback(value, nil)") + code.print("return") + code.outdent() + code.print("}") + code.outdent() + code.print("}") + code.print("callback(nil, nil)") + code.outdent() + code.print("} else {") + code.indent() + code.print(" callback(nil, ClientError.errorWithCode(httpResponse.statusCode))") + code.outdent() + code.print("}") + code.outdent() + code.print("}") + } else { + code.print("fetch(request) {(data, response, error) in") + code.print("if error != nil {") + code.indent() + code.print("callback(ClientError.errorWithCode(0))") + code.print("return") + code.outdent() + code.print("}") + code.print("guard let httpResponse = response else {") + code.indent() + code.print("callback(ClientError.errorWithCode(0))") + code.print("return") + code.outdent() + code.print("}") + code.print("if httpResponse.statusCode == 200 {") + code.indent() + code.print("callback(nil)") + code.print("} else {") + code.indent() + code.print("callback(ClientError.errorWithCode(httpResponse.statusCode))") + code.outdent() + code.print("}") + code.outdent() + code.print("}") + } + code.outdent() + code.print("}") + code.print() + code.print("// " + serviceMethod.description + " Synchronous.") + code.print("public func " + serviceMethod.name + "(" + syncClientParametersDeclaration(serviceMethod) + ") throws " + syncClientReturnDeclaration(serviceMethod) + " {") + code.indent() + code.print("let sem = DispatchSemaphore(value: 0)") + if hasResponses(serviceMethod) { + code.print("var response : " + serviceMethod.resultTypeName! + "?") + } + code.print("var error : Swift.Error?") + if hasResponses(serviceMethod) { + code.print("try " + serviceMethod.name + "(" + parameterFieldNames(serviceMethod) + ") {r, e in") + code.indent() + code.print("response = r") + } else { + code.print("try " + serviceMethod.name + "(" + parameterFieldNames(serviceMethod) + ") {e in") + code.indent() + } + code.print("error = e") + code.print("sem.signal()") + code.outdent() + code.print("}") + code.print("sem.wait()") + code.print("if let actualError = error {") + code.indent() + code.print("throw actualError") + code.outdent() + code.print("}") + if hasResponses(serviceMethod) { + code.print("if let actualResponse = response {") + code.indent() + code.print("return actualResponse") + code.outdent() + code.print("} else {") + code.indent() + code.print("throw ClientError.errorWithCode(0)") + code.outdent() + code.print("}") + } + code.outdent() + code.print("}") + code.print() + } + code.outdent() + code.print("}") + return code.content + } +} + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderFetch.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderFetch.swift new file mode 100644 index 000000000..99494a4b1 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderFetch.swift @@ -0,0 +1,157 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import Gnostic + +extension ServiceRenderer { + + func renderFetch() -> String { + var code = CodePrinter() + code.print(header) + + code.print(""" +import Foundation +import Dispatch +import KituraNet + +// fetch makes a synchronous request using KituraNet's ClientRequest class +// https://github.com/IBM-Swift/Kitura-net/blob/master/Sources/KituraNet/ClientRequest.swift +public func fetch(_ urlRequest: URLRequest) -> (Data?, HTTPURLResponse?, Error?) { + var data: Data? + var urlResponse: HTTPURLResponse? + let error: Error? = nil // make this mutable when we start using it + let sem = DispatchSemaphore(value: 0) + guard let method = urlRequest.httpMethod else { + return (data, urlResponse, error) + } + guard let url = urlRequest.url else { + return (data, urlResponse, error) + } + guard let scheme = url.scheme else { + return (data, urlResponse, error) + } + guard let host = url.host else { + return (data, urlResponse, error) + } + guard let port = url.port else { + return (data, urlResponse, error) + } + let options : [ClientRequest.Options] = [ + .method(method), + .schema(scheme), + .hostname(host), + .port(Int16(port)), + .path(url.path), + // headers, etc + ] + let request = HTTP.request(options) { (response) in + guard let response = response else { + sem.signal() + return + } + var responseData = Data() + do { + let code = response.httpStatusCode + try response.readAllData(into: &responseData) + data = responseData + urlResponse = HTTPURLResponse(url:url, + statusCode:code.rawValue, + httpVersion:"HTTP/1.1", + headerFields:[:]) + sem.signal() + return + } catch { + sem.signal() + return + } + } + if let requestData = urlRequest.httpBody { + request.write(from:requestData) + } + request.end() // send the request + // now wait on the semaphore for a response + let result = sem.wait(timeout: DispatchTime.distantFuture) + switch result { + case .success: + return (data, urlResponse, error) + default: // includes .timeout + return (data, urlResponse, error) + } +} + +// fetch makes an asynchronous request using KituraNet's ClientRequest class +// https://github.com/IBM-Swift/Kitura-net/blob/master/Sources/KituraNet/ClientRequest.swift +public func fetch(_ urlRequest: URLRequest, callback:@escaping (Data?, HTTPURLResponse?, Error?) -> ()) { + var data: Data? + var urlResponse: HTTPURLResponse? + let error: Error? 
= nil // make this mutable when we start using it + guard let method = urlRequest.httpMethod else { + callback (data, urlResponse, error) + return + } + guard let url = urlRequest.url else { + callback (data, urlResponse, error) + return + } + guard let scheme = url.scheme else { + callback (data, urlResponse, error) + return + } + guard let host = url.host else { + callback (data, urlResponse, error) + return + } + guard let port = url.port else { + callback (data, urlResponse, error) + return + } + let options : [ClientRequest.Options] = [ + .method(method), + .schema(scheme), + .hostname(host), + .port(Int16(port)), + .path(url.path), + // headers, etc + ] + let request = HTTP.request(options) { (response) in + guard let response = response else { + callback (data, urlResponse, nil) + return + } + var responseData = Data() + do { + let code = response.httpStatusCode + try response.readAllData(into: &responseData) + data = responseData + urlResponse = HTTPURLResponse(url:url, + statusCode:code.rawValue, + httpVersion:"HTTP/1.1", + headerFields:[:]) + callback (data, urlResponse, nil) + return + } catch { + callback (data, urlResponse, nil) + return + } + } + if let requestData = urlRequest.httpBody { + request.write(from:requestData) + } + request.end() // send the request +} +""") + return code.content + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderServer.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderServer.swift new file mode 100644 index 000000000..837b39fb7 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderServer.swift @@ -0,0 +1,166 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import Gnostic + +extension ServiceRenderer { + + func renderServer() -> String { + var code = CodePrinter() + code.print(header) + code.print() + code.print("// Service code") + code.print("import Kitura") + code.print("import KituraNet") + code.print("import Foundation") + code.print("// A server requires an instance of an implementation of this protocol.") + code.print("public protocol Service {") + code.indent() + for serviceMethod in self.methods { + code.print("// " + serviceMethod.description) + code.print("func " + serviceMethod.name + " (" + + protocolParametersDeclaration(serviceMethod) + ") throws " + + protocolReturnDeclaration(serviceMethod)) + } + code.outdent() + code.print("}") + + code.print("func intValue(_ s:String?) 
-> Int64 {") + code.indent() + code.print("guard let s = s else {") + code.indent() + code.print("return 0") + code.outdent() + code.print("}") + code.print("guard let value = Int64(s) else {") + code.indent() + code.print("return 0") + code.outdent() + code.print("}") + code.print("return value") + code.outdent() + code.print("}") + code.print("public func server(service : Service) -> Router {") + code.indent() + code.print("// Create a new router") + code.print("let router = Router()") + for serviceMethod in self.methods { + code.print("// " + serviceMethod.description) + code.print("router." + lowercase(serviceMethod.method) + "(\"" + kituraPath(serviceMethod) + "\") { req, res, next in") + code.indent() + if hasParameters(serviceMethod) { + code.print("// instantiate the parameters structure") + code.print("let parameters = " + serviceMethod.parametersTypeName! + "()") + for serviceTypeField in parametersTypeFields(serviceMethod) { + if serviceTypeField.position == "path" { + code.print("parameters." + serviceTypeField.name + + " = intValue(req.parameters[\"" + + serviceTypeField.name + "\"])") + } + } + if serviceMethod.method == "POST" { + code.print("// deserialize request from post data") + code.print("let bodyString = try req.readString() ?? \"\"") + code.print("guard let bodyData = bodyString.data(using:.utf8) else {") + code.indent() + code.print("try res.send(status:.badRequest).end()") + code.print("return") + code.outdent() + code.print("}") + code.print("var jsonObject : Any? = nil") + code.print("do {") + code.indent() + code.print("jsonObject = try JSONSerialization.jsonObject(with:bodyData)") + code.outdent() + code.print("} catch {") + code.indent() + code.print("try res.send(status:.badRequest).end()") + code.print("return") + code.outdent() + code.print("}") + code.print("guard let bodyObject = " + serviceMethod.resultTypeName! + "(jsonObject:jsonObject) else {") + code.print("try res.send(status:.badRequest).end()") + code.indent() + code.print("return") + code.outdent() + code.print("}") + code.print("parameters." + bodyParameterFieldName(serviceMethod) + " = bodyObject") + } + } + if hasParameters(serviceMethod) { + if hasResponses(serviceMethod) { + code.print("let responses = try service." + serviceMethod.name + "(parameters)") + } else { + code.print("try service." + serviceMethod.name + "(parameters)") + } + } else { + if hasResponses(serviceMethod) { + code.print("let responses = try service." + serviceMethod.name + "()") + } else { + code.print("try service." 
+ serviceMethod.name + "()") + } + } + if hasResponses(serviceMethod) { + if responsesHasFieldNamedOK(serviceMethod) { + code.print("if let ok = responses.ok {") + code.indent() + code.print("let jsonObject = ok.jsonObject()") + code.print("let responseData = try JSONSerialization.data(withJSONObject:jsonObject)") + code.print("try res.send(data:responseData).end()") + code.print("return") + code.outdent() + code.print("}") + } + if responsesHasFieldNamedError(serviceMethod) { + code.print("if let errorResponse = responses.error {") + code.indent() + code.print("guard let statusCode = HTTPStatusCode(rawValue:Int(errorResponse.code)) else {") + code.indent() + code.print("try res.send(status:.unknown).end()") + code.print("return") + code.outdent() + code.print("}") + code.print("try res.send(status:statusCode).end()") + code.print("return") + code.outdent() + code.print("}") + } + code.print("try res.send(status:.internalServerError).end()") + } else { + code.print("try res.send(status:.OK).end()") + } + code.outdent() + code.print("}") + } + code.print("return router") + code.outdent() + code.print("}") + code.print("public func initialize(service: Service, port:Int) {") + code.indent() + code.print("// Create a new router") + code.print("let router = server(service:service)") + code.print("// Add an HTTP server and connect it to the router") + code.print("Kitura.addHTTPServer(onPort:port, with: router)") + code.outdent() + code.print("}") + code.print("public func run() {") + code.indent() + code.print("// Start the Kitura runloop (this call never returns)") + code.print("Kitura.run()") + code.outdent() + code.print("}") + return code.content + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderTypes.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderTypes.swift new file mode 100644 index 000000000..f75f7904c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/RenderTypes.swift @@ -0,0 +1,112 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import Gnostic + +extension ServiceRenderer { + + func renderTypes() -> String { + var code = CodePrinter() + code.print(header) + code.print() + code.print("// Common type declarations") + for serviceType in self.types { + code.print() + code.print("public class " + serviceType.name + " : CustomStringConvertible {") + code.indent() + for serviceTypeField in serviceType.fields { + code.print("public var " + serviceTypeField.name + " : " + serviceTypeField.typeName + " = " + serviceTypeField.initialValue) + } + code.print() + code.print("public init() {}") + code.print() + if serviceType.isInterfaceType { + code.print("public init?(jsonObject: Any?) {") + code.indent() + code.print("if let jsonDictionary = jsonObject as? 
[String:Any] {") + code.indent() + for serviceTypeField in serviceType.fields { + code.print("if let value : Any = jsonDictionary[\"" + serviceTypeField.jsonName + "\"] {") + code.indent() + if serviceTypeField.isArrayType { + code.print("var outArray : [" + serviceTypeField.elementType + "] = []") + code.print("let array = value as! [Any]") + code.print("for arrayValue in array {") + code.indent() + code.print("if let element = " + serviceTypeField.elementType + "(jsonObject:arrayValue) {") + code.indent() + code.print("outArray.append(element)") + code.outdent() + code.print("}") + code.outdent() + code.print("}") + code.print("self." + serviceTypeField.name + " = outArray") + } else if serviceTypeField.isCastableType { + code.print("self." + serviceTypeField.name + " = value as! " + serviceTypeField.typeName) + } else if serviceTypeField.isConvertibleType { + code.print("self." + serviceTypeField.name + " = " + serviceTypeField.typeName + "(value)") + } + code.outdent() + code.print("}") + } + code.outdent() + code.print("} else {") + code.indent() + code.print("return nil") + code.outdent() + code.print("}") + code.outdent() + code.print("}") + code.print() + code.print("public func jsonObject() -> Any {") + code.indent() + code.print("var result : [String:Any] = [:]") + for serviceTypeField in serviceType.fields { + if serviceTypeField.isArrayType { + code.print("var outArray : [Any] = []") + code.print("for arrayValue in self." + serviceTypeField.name + " {") + code.indent() + code.print("outArray.append(arrayValue.jsonObject())") + code.outdent() + code.print("}") + code.print("result[\"" + serviceTypeField.jsonName + "\"] = outArray") + } + if serviceTypeField.isCastableType { + code.print("result[\"" + serviceTypeField.jsonName + "\"] = self." + serviceTypeField.name) + } + if serviceTypeField.isConvertibleType { + code.print("result[\"" + serviceTypeField.jsonName + "\"] = self." + serviceTypeField.name + ".jsonObject()") + } + } + code.print("return result") + code.outdent() + code.print("}") + code.print() + } + code.print("public var description : String {") + code.indent() + code.print("return \"[" + serviceType.name + "\" + ") + for serviceTypeField in serviceType.fields { + code.print(" \" " + serviceTypeField.name + ": \" + String(describing:self." + serviceTypeField.name + ") + ") + } + code.print("\"]\"") + code.outdent() + code.print("}") + code.outdent() + code.print("}") + } + return code.content + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/Renderer.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/Renderer.swift new file mode 100644 index 000000000..4e78c3617 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/Renderer.swift @@ -0,0 +1,336 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import Foundation +import Gnostic + +extension String { + func capitalizingFirstLetter() -> String { + let first = String(characters.prefix(1)).capitalized + let other = String(characters.dropFirst()) + return first + other + } + + mutating func capitalizeFirstLetter() { + self = self.capitalizingFirstLetter() + } +} + +class ServiceType { + var name : String = "" + var fields : [ServiceTypeField] = [] + var isInterfaceType : Bool = false +} + +class ServiceTypeField { + var name : String = "" + var typeName : String = "" + var isArrayType : Bool = false + var isCastableType : Bool = false + var isConvertibleType : Bool = false + var elementType : String = "" + var jsonName : String = "" + var position: String = "" // "body", "header", "formdata", "query", or "path" + var initialValue : String = "" + + + func setTypeForName(_ name : String, _ format : String) { + switch name { + case "integer": + if format == "int32" { + self.typeName = "Int32" + } else if format == "int64" { + self.typeName = "Int64" + } else { + self.typeName = "Int" + } + self.initialValue = "0" + self.isCastableType = true + default: + self.typeName = name.capitalizingFirstLetter() + self.initialValue = self.typeName + "()" + self.isConvertibleType = true + } + } + + + func setTypeForSchema(_ schema : Openapi_V2_Schema, optional : Bool = false) { + let ref = schema.ref + if ref != "" { + self.typeName = typeForRef(ref) + self.isConvertibleType = true + self.initialValue = self.typeName + "()" + } + if schema.hasType { + let types = schema.type.value + let format = schema.format + if types.count == 1 && types[0] == "string" { + self.typeName = "String" + self.isCastableType = true + self.initialValue = "\"\"" + } + if types.count == 1 && types[0] == "integer" && format == "int32" { + self.typeName = "Int32" + self.isCastableType = true + self.initialValue = "0" + } + if types.count == 1 && types[0] == "array" && schema.hasItems { + // we have an array.., but of what? + let items = schema.items.schema + if items.count == 1 && items[0].ref != "" { + self.isArrayType = true + self.elementType = typeForRef(items[0].ref) + self.typeName = "[" + self.elementType + "]" + self.initialValue = "[]" + } + } + } + // this function is incomplete... so return a string representing anything that we don't handle + if self.typeName == "" { + self.typeName = "\(schema)" + } + if optional { + self.typeName += "?" + self.initialValue = "nil" + } + } +} + +class ServiceMethod { + var name : String = "" + var path : String = "" + var method : String = "" + var description : String = "" + var handlerName : String = "" + var processorName : String = "" + var clientName : String = "" + var resultTypeName : String? + var parametersTypeName : String? + var responsesTypeName : String? + var parametersType : ServiceType? + var responsesType : ServiceType? 
+} + +func propertyNameForResponseCode(_ code:String) -> String { + switch code { + case "200": + return "ok" + case "default": + return "error" + default: + return code + } +} + +func typeForRef(_ ref : String) -> String { + let parts = ref.components(separatedBy:"/") + return parts.last!.capitalizingFirstLetter() +} + +class ServiceRenderer { + internal var name : String = "" + internal var package: String = "" + internal var types : [ServiceType] = [] + internal var methods : [ServiceMethod] = [] + internal var surface : Surface_V1_Model + + public init(surface : Surface_V1_Model, document : Openapi_V2_Document) { + self.surface = surface + loadService(document:document) + } + + private func loadServiceTypeFromParameters(_ name:String, + _ parameters:[Openapi_V2_ParametersItem]) + -> ServiceType? { + let t = ServiceType() + t.name = name.capitalizingFirstLetter() + "Parameters" + for parametersItem in parameters { + let f = ServiceTypeField() + f.typeName = "\(parametersItem)" + + switch parametersItem.oneof! { + case .parameter(let parameter): + switch parameter.oneof! { + case .bodyParameter(let bodyParameter): + f.name = bodyParameter.name + if bodyParameter.hasSchema { + f.setTypeForSchema(bodyParameter.schema) + f.position = "body" + } + case .nonBodyParameter(let nonBodyParameter): + switch (nonBodyParameter.oneof!) { + case .headerParameterSubSchema(let headerParameter): + f.name = headerParameter.name + f.position = "header" + case .formDataParameterSubSchema(let formDataParameter): + f.name = formDataParameter.name + f.position = "formdata" + case .queryParameterSubSchema(let queryParameter): + f.name = queryParameter.name + f.position = "query" + case .pathParameterSubSchema(let pathParameter): + f.name = pathParameter.name + f.jsonName = pathParameter.name + f.position = "path" + f.setTypeForName(pathParameter.type, pathParameter.format) + } + } + case .jsonReference: // (let reference): + Log("?") + } + t.fields.append(f) + } + if t.fields.count > 0 { + self.types.append(t) + return t + } else { + return nil + } + } + + private func loadServiceTypeFromResponses(_ m:ServiceMethod, + _ name:String, + _ responses:Openapi_V2_Responses) + -> ServiceType? 
{ + let t = ServiceType() + t.name = name.capitalizingFirstLetter() + "Responses" + for responseCode in responses.responseCode { + let f = ServiceTypeField() + f.name = propertyNameForResponseCode(responseCode.name) + f.jsonName = "" + if let responseCodeValueOneOf = responseCode.value.oneof { + switch responseCodeValueOneOf { + case .response(let response): + let schema = response.schema + if let schemaOneOf = schema.oneof { + switch schemaOneOf { + case .schema(let schema): + f.setTypeForSchema(schema, optional:true) + t.fields.append(f) + if f.name == "ok" { + m.resultTypeName = f.typeName.replacingOccurrences(of:"?", with:"") + } + default: + break + } + } + default: + break + } + } + } + if t.fields.count > 0 { + self.types.append(t) + return t + } else { + return nil + } + } + + private func loadOperation(_ operation : Openapi_V2_Operation, + method : String, + path : String) { + let m = ServiceMethod() + m.name = operation.operationID + m.path = path + m.method = method + m.description = operation.description_p + m.handlerName = "handle" + m.name + m.processorName = "" + m.name + m.clientName = m.name + m.parametersType = loadServiceTypeFromParameters(m.name, operation.parameters) + if m.parametersType != nil { + m.parametersTypeName = m.parametersType!.name + } + m.responsesType = loadServiceTypeFromResponses(m, m.name, operation.responses) + if m.responsesType != nil { + m.responsesTypeName = m.responsesType!.name + } + self.methods.append(m) + } + + private func loadService(document : Openapi_V2_Document) { + // collect service type descriptions + for pair in document.definitions.additionalProperties { + let t = ServiceType() + t.isInterfaceType = true + let schema = pair.value + for pair2 in schema.properties.additionalProperties { + let f = ServiceTypeField() + f.name = pair2.name + f.setTypeForSchema(pair2.value) + f.jsonName = pair2.name + t.fields.append(f) + } + t.name = pair.name.capitalizingFirstLetter() + self.types.append(t) + } + // collect service method descriptions + for pair in document.paths.path { + let v = pair.value + if v.hasGet { + loadOperation(v.get, method:"GET", path:pair.name) + } + if v.hasPost { + loadOperation(v.post, method:"POST", path:pair.name) + } + if v.hasPut { + loadOperation(v.put, method:"PUT", path:pair.name) + } + if v.hasDelete { + loadOperation(v.delete, method:"DELETE", path:pair.name) + } + } + } + + public func generate(filenames : [String], response : inout Gnostic_Plugin_V1_Response) throws { + for filename in filenames { + var data : Data? + switch filename { + case "types.swift": + data = renderTypes().data(using:.utf8) + case "server.swift": + data = renderServer().data(using:.utf8) + case "client.swift": + data = renderClient().data(using:.utf8) + case "fetch.swift": + data = renderFetch().data(using:.utf8) + default: + print("error: unable to render \(filename)") + } + if let data = data { + var clientfile = Gnostic_Plugin_V1_File() + clientfile.name = filename + clientfile.data = data + response.files.append(clientfile) + } + } + } +} + +let header = """ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +""" diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/helpers.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/helpers.swift new file mode 100644 index 000000000..cab487583 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/helpers.swift @@ -0,0 +1,166 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +func hasParameters(_ value : Any?) -> Bool { + let method : ServiceMethod = value as! ServiceMethod + return method.parametersType != nil +} + +func hasResponses(_ value : Any?) -> Bool { + let method : ServiceMethod = value as! ServiceMethod + return method.responsesType != nil +} + +func syncClientParametersDeclaration(_ value: Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + var result = "" + if let parametersType = method.parametersType { + for field in parametersType.fields { + if result != "" { + result += ", " + } + result += field.name + " : " + field.typeName + } + } + return result +} + +func syncClientReturnDeclaration(_ value : Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + var result = "" + if let resultTypeName = method.resultTypeName { + result = " -> " + resultTypeName + } + return result +} + +func asyncClientParametersDeclaration(_ value : Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + var result = "" + if let parametersType = method.parametersType { + for field in parametersType.fields { + if result != "" { + result += ", " + } + result += field.name + " : " + field.typeName + } + } + // add callback + if result != "" { + result += ", " + } + if let resultTypeName = method.resultTypeName { + result += "callback : @escaping (" + resultTypeName + "?, Swift.Error?)->()" + } else { + result += "callback : @escaping (Swift.Error?)->()" + } + return result +} + +func protocolParametersDeclaration(_ value: Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + var result = "" + if let parametersTypeName = method.parametersTypeName { + result = "_ parameters : " + parametersTypeName + } + return result +} + +func protocolReturnDeclaration(_ value: Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + var result = "" + if let responsesTypeName = method.responsesTypeName { + result = "-> " + responsesTypeName + } + return result +} + +func parameterFieldNames(_ value: Any?) 
-> String { + let method : ServiceMethod = value as! ServiceMethod + var result = "" + if let parametersType = method.parametersType { + for field in parametersType.fields { + if result != "" { + result += ", " + } + result += field.name + ":" + field.name + } + } + return result +} + +func parametersTypeFields(_ value: Any?) -> [ServiceTypeField] { + let method : ServiceMethod = value as! ServiceMethod + if let parametersType = method.parametersType { + return parametersType.fields + } else { + return [] + } +} + +func kituraPath(_ value: Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + var path = method.path + if let parametersType = method.parametersType { + for field in parametersType.fields { + if field.position == "path" { + let original = "{" + field.jsonName + "}" + let replacement = ":" + field.jsonName + path = path.replacingOccurrences(of:original, with:replacement) + } + } + } + return path +} + +func bodyParameterFieldName(_ value: Any?) -> String { + let method : ServiceMethod = value as! ServiceMethod + if let parametersType = method.parametersType { + for field in parametersType.fields { + if field.position == "body" { + return field.name + } + } + } + return "" +} + +func responsesHasFieldNamedOK(_ value: Any?) -> Bool { + let method : ServiceMethod = value as! ServiceMethod + if let responsesType = method.responsesType { + for field in responsesType.fields { + if field.name == "ok" { + return true + } + } + } + return false +} + +func responsesHasFieldNamedError(_ value: Any?) -> Bool { + let method : ServiceMethod = value as! ServiceMethod + if let responsesType = method.responsesType { + for field in responsesType.fields { + if field.name == "error" { + return true + } + } + } + return false +} + +func lowercase(_ s : String) -> String { + return s.lowercased() +} + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/io.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/io.swift new file mode 100644 index 000000000..f2fc40036 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/io.swift @@ -0,0 +1,106 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation + +// The I/O code below is derived from Apple's swift-protobuf project. +// https://github.com/apple/swift-protobuf +// BEGIN swift-protobuf derivation + +#if os(Linux) +import Glibc +#else +import Darwin.C +#endif + +enum PluginError: Error { + /// Raised for any errors reading the input + case readFailure +} + +// Alias clib's write() so Stdout.write(bytes:) can call it. 
+private let _write = write + +class Stdin { + static func readall() throws -> Data { + let fd: Int32 = 0 + let buffSize = 32 + var buff = [UInt8]() + while true { + var fragment = [UInt8](repeating: 0, count: buffSize) + let count = read(fd, &fragment, buffSize) + if count < 0 { + throw PluginError.readFailure + } + if count < buffSize { + buff += fragment[0..<count] + return Data(bytes: buff) + } + buff += fragment + } + } +} + +class Stdout { + static func write(bytes: Data) { + bytes.withUnsafeBytes { (p: UnsafePointer<UInt8>) -> () in + _ = _write(1, p, bytes.count) + } + } +} + +struct CodePrinter { + private(set) var content = "" + private var currentIndentDepth = 0 + private var currentIndent = "" + private var atLineStart = true + + mutating func print() { + print("") + } + + mutating func print(_ text: String...) { + for t in text { + for c in t.characters { + if c == "\n" { + content.append(c) + atLineStart = true + } else { + if atLineStart { + content.append(currentIndent) + atLineStart = false + } + content.append(c) + } + } + } + content.append("\n") + atLineStart = true + } + + mutating private func resetIndent() { + currentIndent = (0..<currentIndentDepth).map { (_) -> String in return "  " } .joined(separator:"") + } + + mutating func indent() { + currentIndentDepth += 1 + resetIndent() + } + mutating func outdent() { + currentIndentDepth -= 1 + resetIndent() + } +} + +// END swift-protobuf derivation diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/main.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/main.swift new file mode 100644 index 000000000..a6e5bfb55 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/Sources/gnostic-swift-generator/main.swift @@ -0,0 +1,57 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import Gnostic + +func Log(_ message : String) { + FileHandle.standardError.write((message + "\n").data(using:.utf8)!)
+} + +func main() throws { + + // read the code generation request + let rawRequest = try Stdin.readall() + let request = try Gnostic_Plugin_V1_Request(serializedData:rawRequest) + + var response = Gnostic_Plugin_V1_Response() + + if request.hasOpenapi2 && request.hasSurface { + let document = request.openapi2 + let surface = request.surface + + Log("\(request.surface)") + + // build the service renderer + let renderer = ServiceRenderer(surface:surface, document:document) + + // generate the desired files + var filenames : [String] + switch CommandLine.arguments[0] { + case "openapi_swift_client": + filenames = ["client.swift", "types.swift", "fetch.swift"] + case "openapi_swift_server": + filenames = ["server.swift", "types.swift"] + default: + filenames = ["client.swift", "server.swift", "types.swift", "fetch.swift"] + } + try renderer.generate(filenames:filenames, response:&response) + } + + // return the results + let serializedResponse = try response.serializedData() + Stdout.write(bytes: serializedResponse) +} + +try main() diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/compile-protos b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/compile-protos new file mode 100755 index 000000000..ef7cc52f6 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/compile-protos @@ -0,0 +1,52 @@ +#!/bin/sh +# +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Use this script to run protoc and swift-proto to generate +# support code for gnostic protos. 
+ +GNOSTIC=$GOPATH/src/github.com/googleapis/gnostic + +PROTOS=( + plugins/plugin.proto + OpenAPIv2/OpenAPIv2.proto + OpenAPIv3/OpenAPIv3.proto + surface/surface.proto + discovery/discovery.proto +) + +# remove old compiled pb files +rm -rf Sources/Gnostic/*.pb.swift + +# remove any prior compilations +rm -rf Sources/Gnostic/github.com + +# compile protos +for proto in "${PROTOS[@]}" +do + echo "COMPILING $proto" + protoc $GNOSTIC/$proto \ + --swift_opt=Visibility=Public \ + --swift_out=Sources/Gnostic \ + --proto_path=$GOPATH/src + +# relocate compiled protos +find Sources/Gnostic/github.com -name "*.pb.swift" -exec mv {} Sources/Gnostic \; + +# remove scaffolding of compilation +rm -rf Sources/Gnostic/github.com + +done + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Makefile new file mode 100644 index 000000000..6d145622d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Makefile @@ -0,0 +1,9 @@ + +all: + rm -f gnostic-swift-generator + ln -s ../../gnostic-swift-generator + gnostic bookstore.json --swift-generator-out=Sources/Bookstore + swift build + +test: + swift test diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Package.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Package.swift new file mode 100644 index 000000000..ba723e65d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Package.swift @@ -0,0 +1,26 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import PackageDescription + +let package = Package( + name: "BookstoreExample", + targets: [ + Target(name: "Server", dependencies: ["Bookstore"]), + ], + dependencies: [ + .Package(url: "https://github.com/IBM-Swift/Kitura.git", majorVersion: 1, minor: 7) + ] +) + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/README.md b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/README.md new file mode 100644 index 000000000..ee9fb3bad --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/README.md @@ -0,0 +1,22 @@ +# Bookstore Example + +This directory contains an OpenAPI description of a simple bookstore API. + +Use this example to try the `openapi_swift_generator` plugin, which +generates Swift code that implements an API client and server for +an OpenAPI description. + +Run `make all` to build and install `openapic` and the Swift plugin. +It will generate both client and server code. The API client and +server code will be in the `Sources/Bookstore` package. + +The `Sources/Server` directory contains additional code that completes the server. 
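The generated `Sources/Bookstore` package also contains the API client. For orientation, it is used roughly along these lines (a minimal sketch based on the tests further down in this change; it assumes the example server is already running on localhost:8080):

    import Bookstore

    do {
        let client = Bookstore.Client(service: "http://localhost:8080")
        let shelf = Shelf()
        shelf.theme = "mysteries"
        let created = try client.createShelf(shelf: shelf)   // POST /shelves
        let all = try client.listShelves()                    // GET /shelves
        print("created \(created.name); the store now has \(all.shelves.count) shelves")
    } catch {
        print("request failed: \(error)")
    }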
+To build and run the server, do the following: + + swift build + .build/debug/Server & + +To test the service with the generated client, run `swift build`. +Tests are in the `Tests` directory and use client +code generated in `Bookstore` to verify the service. + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Sources/Server/main.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Sources/Server/main.swift new file mode 100644 index 000000000..f8f43cfe9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Sources/Server/main.swift @@ -0,0 +1,128 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Bookstore + +class Server : Service { + private var shelves : [Int64:Shelf] = [:] + private var books : [Int64:[Int64:Book]] = [:] + private var lastShelfIndex : Int64 = 0 + private var lastBookIndex : Int64 = 0 + + // Return all shelves in the bookstore. + func listShelves () throws -> ListShelvesResponses { + let responses = ListShelvesResponses() + let response = ListShelvesResponse() + var shelves : [Shelf] = [] + for pair in self.shelves { + shelves.append(pair.value) + } + response.shelves = shelves + responses.ok = response + return responses + } + // Create a new shelf in the bookstore. + func createShelf (_ parameters : CreateShelfParameters) throws -> CreateShelfResponses { + lastShelfIndex += 1 + let shelf = parameters.shelf + shelf.name = "shelves/\(lastShelfIndex)" + shelves[lastShelfIndex] = shelf + let responses = CreateShelfResponses() + responses.ok = shelf + return responses + } + // Delete all shelves. + func deleteShelves () throws { + shelves = [:] + books = [:] + lastShelfIndex = 0 + lastBookIndex = 0 + } + // Get a single shelf resource with the given ID. + func getShelf (_ parameters : GetShelfParameters) throws -> GetShelfResponses { + let responses = GetShelfResponses() + if let shelf : Shelf = shelves[parameters.shelf] { + responses.ok = shelf + } else { + let err = Error() + err.code = 404 + err.message = "not found" + responses.error = err + } + return responses + } + // Delete a single shelf with the given ID. + func deleteShelf (_ parameters : DeleteShelfParameters) throws { + shelves[parameters.shelf] = nil + books[parameters.shelf] = nil + } + // Return all books in a shelf with the given ID. + func listBooks (_ parameters : ListBooksParameters) throws -> ListBooksResponses { + let responses = ListBooksResponses() + let response = ListBooksResponse() + var books : [Book] = [] + if let shelfBooks = self.books[parameters.shelf] { + for pair in shelfBooks { + books.append(pair.value) + } + } + response.books = books + responses.ok = response + return responses + } + // Create a new book on the shelf. 
+ func createBook (_ parameters : CreateBookParameters) throws -> CreateBookResponses { + let responses = CreateBookResponses() + lastBookIndex += 1 + let shelf = parameters.shelf + let book = parameters.book + book.name = "shelves/\(shelf)/books/\(lastBookIndex)" + if var shelfBooks = self.books[shelf] { + shelfBooks[lastBookIndex] = book + self.books[shelf] = shelfBooks + } else { + var shelfBooks : [Int64:Book] = [:] + shelfBooks[lastBookIndex] = book + self.books[shelf] = shelfBooks + } + responses.ok = book + return responses + } + // Get a single book with a given ID from a shelf. + func getBook (_ parameters : GetBookParameters) throws -> GetBookResponses { + let responses = GetBookResponses() + if let shelfBooks = self.books[parameters.shelf], + let book = shelfBooks[parameters.book] { + responses.ok = book + } else { + let err = Error() + err.code = 404 + err.message = "not found" + responses.error = err + } + return responses + } + // Delete a single book with a given ID from a shelf. + func deleteBook (_ parameters : DeleteBookParameters) throws { + if var shelfBooks = self.books[parameters.shelf] { + shelfBooks[parameters.book] = nil + self.books[parameters.shelf] = shelfBooks + } + } +} + +initialize(service:Server(), port:8080) + +run() + diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Tests/BookstoreTests/BookstoreTests.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Tests/BookstoreTests/BookstoreTests.swift new file mode 100644 index 000000000..1cd2afec0 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Tests/BookstoreTests/BookstoreTests.swift @@ -0,0 +1,208 @@ +/* + * + * Copyright 2017, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + */ +import XCTest +import Foundation +@testable import Bookstore + +func Log(_ message : String) { + FileHandle.standardError.write((message + "\n").data(using:.utf8)!) 
+} + +let service = "http://localhost:8080" + +class BookstoreTests: XCTestCase { + + func testBasic() { + // create a client + let b = Bookstore.Client(service:service) + Log("// reset the service by deleting all shelves") + do { + try b.deleteShelves() + } catch (let error) { + XCTFail("\(error)") + } + Log("// verify that the service has no shelves") + do { + let response = try b.listShelves() + XCTAssertEqual(response.shelves.count, 0) + } catch (let error) { + XCTFail("\(error)") + } + Log("// attempting to get a shelf should return an error") + do { + let _ = try b.getShelf(shelf:1) + XCTFail("server error") + } catch { + } + Log("// attempting to get a book should return an error") + do { + let _ = try b.getBook(shelf:1, book:2) + } catch { + } + Log("// add a shelf") + do { + let shelf = Shelf() + shelf.theme = "mysteries" + let response = try b.createShelf(shelf:shelf) + if (response.name != "shelves/1") || + (response.theme != "mysteries") { + XCTFail("mismatch") + } + } catch (let error) { + XCTFail("\(error)") + } + Log("// add another shelf") + do { + let shelf = Shelf() + shelf.theme = "comedies" + let response = try b.createShelf(shelf:shelf) + if (response.name != "shelves/2") || + (response.theme != "comedies") { + XCTFail("mismatch") + } + } catch (let error) { + XCTFail("\(error)") + } + Log("// get the first shelf that was added") + do { + let response = try b.getShelf(shelf:1) + if (response.name != "shelves/1") || + (response.theme != "mysteries") { + XCTFail("mismatch") + } + } catch (let error) { + XCTFail("\(error)") + } + Log("// list shelves and verify that there are 2") + do { + let response = try b.listShelves() + XCTAssertEqual(response.shelves.count, 2) + } catch (let error) { + XCTFail("\(error)") + } + Log("// delete a shelf") + do { + try b.deleteShelf(shelf:2) + } catch (let error) { + XCTFail("\(error)") + } + Log("// list shelves and verify that there is only 1") + do { + let response = try b.listShelves() + XCTAssertEqual(response.shelves.count, 1) + } catch (let error) { + XCTFail("\(error)") + } + Log("// list books on a shelf, verify that there are none") + do { + let response = try b.listBooks(shelf:1) + XCTAssertEqual(response.books.count, 0) + } catch (let error) { + XCTFail("\(error)") + } + Log("// create a book") + do { + let book = Book() + book.author = "Agatha Christie" + book.title = "And Then There Were None" + let _ = try b.createBook(shelf:1, book:book) + } catch (let error) { + XCTFail("\(error)") + } + Log("// create another book") + do { + let book = Book() + book.author = "Agatha Christie" + book.title = "Murder on the Orient Express" + let _ = try b.createBook(shelf:1, book:book) + } catch (let error) { + XCTFail("\(error)") + } + Log("// get the first book that was added") + do { + let response = try b.getBook(shelf:1, book:1) + if (response.author != "Agatha Christie") || + (response.title != "And Then There Were None") { + XCTFail("mismatch") + } + } catch (let error) { + XCTFail("\(error)") + } + Log("// list the books on a shelf and verify that there are 2") + do { + let response = try b.listBooks(shelf:1) + XCTAssertEqual(response.books.count, 2) + } catch (let error) { + XCTFail("\(error)") + } + Log("// delete a book") + do { + try b.deleteBook(shelf:1, book:2) + } catch (let error) { + XCTFail("\(error)") + } + Log("// list the books on a shelf and verify that is only 1") + do { + let response = try b.listBooks(shelf:1) + XCTAssertEqual(response.books.count, 1) + } catch (let error) { + XCTFail("\(error)") + } + Log("// 
verify the handling of a badly-formed request") + var path = service + path = path + "/shelves" + guard let url = URL(string:path) else { + XCTFail("Failed to construct URL") + return + } + var request = URLRequest(url:url) + request.httpMethod = "POST" + request.httpBody = "".data(using:.utf8) + let (_, response, _) = fetch(request) + // we expect a 400 (Bad Request) code + if let response = response { + XCTAssertEqual(response.statusCode, 400) + } else { + // Failed requests are returning nil responses on Linux. For now we'll say that is OK. + //XCTFail("Null response for bad request") + } + } +} + +extension BookstoreTests { + static var allTests : [(String, (BookstoreTests) -> () throws -> Void)] { + return [ + ("testBasic", testBasic), + ] + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Tests/LinuxMain.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Tests/LinuxMain.swift new file mode 100644 index 000000000..5b1e58008 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/Tests/LinuxMain.swift @@ -0,0 +1,38 @@ +/* + * + * Copyright 2017, Google Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ * + */ +import XCTest +@testable import BookstoreTests + +XCTMain([ + testCase(BookstoreTests.allTests), +]) diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/bookstore.json b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/bookstore.json new file mode 100644 index 000000000..c823cdedf --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-generator/examples/bookstore/bookstore.json @@ -0,0 +1,357 @@ +{ + "swagger": "2.0", + "info": { + "description": "A simple Bookstore API example.", + "title": "Bookstore", + "version": "1.0.0" + }, + "host": "generated-bookstore.appspot.com", + "basePath": "/", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "https" + ], + "paths": { + "/shelves": { + "get": { + "description": "Return all shelves in the bookstore.", + "operationId": "listShelves", + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "List of shelves in the bookstore.", + "schema": { + "$ref": "#/definitions/listShelvesResponse" + } + } + }, + "security": [ + + ] + }, + "post": { + "description": "Create a new shelf in the bookstore.", + "operationId": "createShelf", + "parameters": [ + { + "description": "A shelf resource to create.", + "in": "body", + "name": "shelf", + "required": true, + "schema": { + "$ref": "#/definitions/shelf" + } + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A newly created shelf resource.", + "schema": { + "$ref": "#/definitions/shelf" + } + } + } + }, + "delete": { + "description": "Delete all shelves.", + "operationId": "deleteShelves", + "responses": { + "default": { + "description": "An empty response body." + } + } + } + }, + "/shelves/{shelf}": { + "get": { + "description": "Get a single shelf resource with the given ID.", + "operationId": "getShelf", + "parameters": [ + { + "description": "ID of the shelf to get.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A shelf resource.", + "schema": { + "$ref": "#/definitions/shelf" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "delete": { + "description": "Delete a single shelf with the given ID.", + "operationId": "deleteShelf", + "parameters": [ + { + "description": "ID of the shelf to delete.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + } + ], + "responses": { + "default": { + "description": "An empty response body." 
+ } + } + } + }, + "/shelves/{shelf}/books": { + "get": { + "description": "Return all books in a shelf with the given ID.", + "operationId": "listBooks", + "parameters": [ + { + "description": "ID of the shelf whose books should be returned.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "List of books on the specified shelf.", + "schema": { + "$ref": "#/definitions/listBooksResponse" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "post": { + "description": "Create a new book on the shelf.", + "operationId": "createBook", + "parameters": [ + { + "description": "ID of the shelf where the book should be created.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + }, + { + "description": "Book to create.", + "in": "body", + "name": "book", + "required": true, + "schema": { + "$ref": "#/definitions/book" + } + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A newly created book resource.", + "schema": { + "$ref": "#/definitions/book" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + } + }, + "/shelves/{shelf}/books/{book}": { + "get": { + "description": "Get a single book with a given ID from a shelf.", + "operationId": "getBook", + "parameters": [ + { + "description": "ID of the shelf from which to get the book.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + }, + { + "description": "ID of the book to get from the shelf.", + "format": "int64", + "in": "path", + "name": "book", + "required": true, + "type": "integer" + } + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "A book resource.", + "schema": { + "$ref": "#/definitions/book" + } + }, + "default": { + "description": "unexpected error", + "schema": { + "$ref": "#/definitions/Error" + } + } + } + }, + "delete": { + "description": "Delete a single book with a given ID from a shelf.", + "operationId": "deleteBook", + "parameters": [ + { + "description": "ID of the shelf from which to delete the book.", + "format": "int64", + "in": "path", + "name": "shelf", + "required": true, + "type": "integer" + }, + { + "description": "ID of the book to delete from the shelf.", + "format": "int64", + "in": "path", + "name": "book", + "required": true, + "type": "integer" + } + ], + "responses": { + "default": { + "description": "An empty response body." 
+ } + } + } + } + }, + "definitions": { + "book": { + "properties": { + "author": { + "type": "string" + }, + "name": { + "type": "string" + }, + "title": { + "type": "string" + } + }, + "required": [ + "name", + "author", + "title" + ] + }, + "listBooksResponse": { + "properties": { + "books": { + "items": { + "$ref": "#/definitions/book" + }, + "type": "array" + } + }, + "required": [ + "books" + ], + "type": "object" + }, + "listShelvesResponse": { + "properties": { + "shelves": { + "items": { + "$ref": "#/definitions/shelf" + }, + "type": "array" + } + }, + "type": "object" + }, + "shelf": { + "properties": { + "name": { + "type": "string" + }, + "theme": { + "type": "string" + } + }, + "required": [ + "name", + "theme" + ] + }, + "error": { + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + }, + "security": [ + { + "api_key": [ + + ] + } + ], + "securityDefinitions": { + "api_key": { + "in": "query", + "name": "key", + "type": "apiKey" + } + } +} \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Makefile b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Makefile new file mode 100644 index 000000000..8b67d8d33 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Makefile @@ -0,0 +1,8 @@ + +all: + swift build + +clean : + rm -rf Packages + rm -rf .build + rm -f Package.pins Package.resolved diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Package.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Package.swift new file mode 100644 index 000000000..55f5ad611 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Package.swift @@ -0,0 +1,26 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import PackageDescription + +let package = Package( + name: "gnostic-swift-sample", + targets: [ + Target(name: "gnostic-swift-sample", dependencies: [ "Gnostic" ]), + Target(name: "Gnostic") + ], + dependencies: [ + .Package(url: "https://github.com/apple/swift-protobuf.git", Version(0,9,904)) + ] +) diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/OpenAPIv2.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/OpenAPIv2.pb.swift new file mode 100644 index 000000000..9001d43eb --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/OpenAPIv2.pb.swift @@ -0,0 +1,8213 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto +// +// For information on using the generated types, please see the documenation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that your are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public struct Openapi_V2_AdditionalPropertiesItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AdditionalPropertiesItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schema: Openapi_V2_Schema { + get { + if case .schema(let v)? = _storage._oneof {return v} + return Openapi_V2_Schema() + } + set {_uniqueStorage()._oneof = .schema(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = _storage._oneof {return v} + return false + } + set {_uniqueStorage()._oneof = .boolean(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schema(Openapi_V2_Schema) + case boolean(Bool) + + public static func ==(lhs: Openapi_V2_AdditionalPropertiesItem.OneOf_Oneof, rhs: Openapi_V2_AdditionalPropertiesItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schema(let l), .schema(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Schema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schema(v)} + case 2: + if _storage._oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {_storage._oneof = .boolean(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Any: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Any" + + public var value: SwiftProtobuf.Google_Protobuf_Any { + get {return _storage._value ?? SwiftProtobuf.Google_Protobuf_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var yaml: String { + get {return _storage._yaml} + set {_uniqueStorage()._yaml = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._value) + case 2: try decoder.decodeSingularStringField(value: &_storage._yaml) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._yaml.isEmpty { + try visitor.visitSingularStringField(value: _storage._yaml, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_ApiKeySecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ApiKeySecurity" + + public var type: String = String() + + public var name: String = String() + + public var `in`: String = String() + + public var description_p: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.type) + case 2: try decoder.decodeSingularStringField(value: &self.name) + case 3: try decoder.decodeSingularStringField(value: &self.`in`) + case 4: try decoder.decodeSingularStringField(value: &self.description_p) + case 5: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.type.isEmpty { + try visitor.visitSingularStringField(value: self.type, fieldNumber: 1) + } + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 2) + } + if !self.`in`.isEmpty { + try visitor.visitSingularStringField(value: self.`in`, fieldNumber: 3) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 4) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 5) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_BasicAuthenticationSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".BasicAuthenticationSecurity" + + public var type: String = String() + + public var description_p: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.type) + case 2: try decoder.decodeSingularStringField(value: &self.description_p) + case 3: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.type.isEmpty { + try visitor.visitSingularStringField(value: self.type, fieldNumber: 1) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 2) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_BodyParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".BodyParameter" + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var schema: Openapi_V2_Schema { + get {return _storage._schema ?? Openapi_V2_Schema()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularStringField(value: &_storage._name) + case 3: try decoder.decodeSingularStringField(value: &_storage._in) + case 4: try decoder.decodeSingularBoolField(value: &_storage._required) + case 5: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 2) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 3) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 4) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Contact information for the owners of the API. +public struct Openapi_V2_Contact: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Contact" + + /// The identifying name of the contact person/organization. + public var name: String = String() + + /// The URL pointing to the contact information. + public var url: String = String() + + /// The email address of the contact person/organization. + public var email: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeSingularStringField(value: &self.email) + case 4: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.email.isEmpty { + try visitor.visitSingularStringField(value: self.email, fieldNumber: 3) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Default: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Default" + + public var additionalProperties: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// One or more JSON objects describing the schemas being consumed and produced by the API. +public struct Openapi_V2_Definitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Definitions" + + public var additionalProperties: [Openapi_V2_NamedSchema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Document: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Document" + + /// The Swagger version of this document. + public var swagger: String { + get {return _storage._swagger} + set {_uniqueStorage()._swagger = newValue} + } + + public var info: Openapi_V2_Info { + get {return _storage._info ?? Openapi_V2_Info()} + set {_uniqueStorage()._info = newValue} + } + /// Returns true if `info` has been explicitly set. + public var hasInfo: Bool {return _storage._info != nil} + /// Clears the value of `info`. Subsequent reads from it will return its default value. + public mutating func clearInfo() {_storage._info = nil} + + /// The host (name or ip) of the API. Example: 'swagger.io' + public var host: String { + get {return _storage._host} + set {_uniqueStorage()._host = newValue} + } + + /// The base path to the API. Example: '/api'. + public var basePath: String { + get {return _storage._basePath} + set {_uniqueStorage()._basePath = newValue} + } + + /// The transfer protocol of the API. 
+ public var schemes: [String] { + get {return _storage._schemes} + set {_uniqueStorage()._schemes = newValue} + } + + /// A list of MIME types accepted by the API. + public var consumes: [String] { + get {return _storage._consumes} + set {_uniqueStorage()._consumes = newValue} + } + + /// A list of MIME types the API can produce. + public var produces: [String] { + get {return _storage._produces} + set {_uniqueStorage()._produces = newValue} + } + + public var paths: Openapi_V2_Paths { + get {return _storage._paths ?? Openapi_V2_Paths()} + set {_uniqueStorage()._paths = newValue} + } + /// Returns true if `paths` has been explicitly set. + public var hasPaths: Bool {return _storage._paths != nil} + /// Clears the value of `paths`. Subsequent reads from it will return its default value. + public mutating func clearPaths() {_storage._paths = nil} + + public var definitions: Openapi_V2_Definitions { + get {return _storage._definitions ?? Openapi_V2_Definitions()} + set {_uniqueStorage()._definitions = newValue} + } + /// Returns true if `definitions` has been explicitly set. + public var hasDefinitions: Bool {return _storage._definitions != nil} + /// Clears the value of `definitions`. Subsequent reads from it will return its default value. + public mutating func clearDefinitions() {_storage._definitions = nil} + + public var parameters: Openapi_V2_ParameterDefinitions { + get {return _storage._parameters ?? Openapi_V2_ParameterDefinitions()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var responses: Openapi_V2_ResponseDefinitions { + get {return _storage._responses ?? Openapi_V2_ResponseDefinitions()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + public var security: [Openapi_V2_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var securityDefinitions: Openapi_V2_SecurityDefinitions { + get {return _storage._securityDefinitions ?? Openapi_V2_SecurityDefinitions()} + set {_uniqueStorage()._securityDefinitions = newValue} + } + /// Returns true if `securityDefinitions` has been explicitly set. + public var hasSecurityDefinitions: Bool {return _storage._securityDefinitions != nil} + /// Clears the value of `securityDefinitions`. Subsequent reads from it will return its default value. + public mutating func clearSecurityDefinitions() {_storage._securityDefinitions = nil} + + public var tags: [Openapi_V2_Tag] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. 
+ public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._swagger) + case 2: try decoder.decodeSingularMessageField(value: &_storage._info) + case 3: try decoder.decodeSingularStringField(value: &_storage._host) + case 4: try decoder.decodeSingularStringField(value: &_storage._basePath) + case 5: try decoder.decodeRepeatedStringField(value: &_storage._schemes) + case 6: try decoder.decodeRepeatedStringField(value: &_storage._consumes) + case 7: try decoder.decodeRepeatedStringField(value: &_storage._produces) + case 8: try decoder.decodeSingularMessageField(value: &_storage._paths) + case 9: try decoder.decodeSingularMessageField(value: &_storage._definitions) + case 10: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 11: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 13: try decoder.decodeSingularMessageField(value: &_storage._securityDefinitions) + case 14: try decoder.decodeRepeatedMessageField(value: &_storage._tags) + case 15: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 16: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._swagger.isEmpty { + try visitor.visitSingularStringField(value: _storage._swagger, fieldNumber: 1) + } + if let v = _storage._info { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if !_storage._host.isEmpty { + try visitor.visitSingularStringField(value: _storage._host, fieldNumber: 3) + } + if !_storage._basePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._basePath, fieldNumber: 4) + } + if !_storage._schemes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._schemes, fieldNumber: 5) + } + if !_storage._consumes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._consumes, fieldNumber: 6) + } + if !_storage._produces.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._produces, fieldNumber: 7) + } + if let v = _storage._paths { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._definitions { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 12) + } + if let v = _storage._securityDefinitions { + try visitor.visitSingularMessageField(value: v, fieldNumber: 13) + } + if !_storage._tags.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._tags, fieldNumber: 14) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 15) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 16) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Examples: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Examples" + + public var additionalProperties: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// information about external documentation +public struct Openapi_V2_ExternalDocs: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExternalDocs" + + public var description_p: String = String() + + public var url: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.description_p) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A deterministic version of a JSON Schema object. +public struct Openapi_V2_FileSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".FileSchema" + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var required: [String] { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? 
Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var example: Openapi_V2_Any { + get {return _storage._example ?? Openapi_V2_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._format) + case 2: try decoder.decodeSingularStringField(value: &_storage._title) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._default) + case 5: try decoder.decodeRepeatedStringField(value: &_storage._required) + case 6: try decoder.decodeSingularStringField(value: &_storage._type) + case 7: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + case 8: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 9: try decoder.decodeSingularMessageField(value: &_storage._example) + case 10: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 1) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._required.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._required, fieldNumber: 5) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 6) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 7) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 10) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_FormDataParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".FormDataParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// allows sending a parameter by name only or with an empty value. + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. 
+ public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._required) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 6: try decoder.decodeSingularStringField(value: &_storage._type) + case 7: try decoder.decodeSingularStringField(value: &_storage._format) + case 8: try decoder.decodeSingularMessageField(value: &_storage._items) + case 9: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 10: try decoder.decodeSingularMessageField(value: &_storage._default) + case 11: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 12: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 13: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 14: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 16: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 17: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 19: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 20: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 21: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 22: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 23: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 5) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 6) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 7) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 9) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 11) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 12) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 13) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 14) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 15) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 16) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 17) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 18) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 19) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 20) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 21) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 22) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 23) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Header: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Header" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? 
Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularStringField(value: &_storage._type)
+        case 2: try decoder.decodeSingularStringField(value: &_storage._format)
+        case 3: try decoder.decodeSingularMessageField(value: &_storage._items)
+        case 4: try decoder.decodeSingularStringField(value: &_storage._collectionFormat)
+        case 5: try decoder.decodeSingularMessageField(value: &_storage._default)
+        case 6: try decoder.decodeSingularDoubleField(value: &_storage._maximum)
+        case 7: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum)
+        case 8: try decoder.decodeSingularDoubleField(value: &_storage._minimum)
+        case 9: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum)
+        case 10: try decoder.decodeSingularInt64Field(value: &_storage._maxLength)
+        case 11: try decoder.decodeSingularInt64Field(value: &_storage._minLength)
+        case 12: try decoder.decodeSingularStringField(value: &_storage._pattern)
+        case 13: try decoder.decodeSingularInt64Field(value: &_storage._maxItems)
+        case 14: try decoder.decodeSingularInt64Field(value: &_storage._minItems)
+        case 15: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems)
+        case 16: try decoder.decodeRepeatedMessageField(value: &_storage._enum)
+        case 17: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf)
+        case 18: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 19: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 2) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 4) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 6) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 7) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 8) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 9) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 10) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 11) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 12) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 13) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 14) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 15) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 16) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 17) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 18) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 19) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_HeaderParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".HeaderParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. 
+ public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularBoolField(value: &_storage._required)
+        case 2: try decoder.decodeSingularStringField(value: &_storage._in)
+        case 3: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 4: try decoder.decodeSingularStringField(value: &_storage._name)
+        case 5: try decoder.decodeSingularStringField(value: &_storage._type)
+        case 6: try decoder.decodeSingularStringField(value: &_storage._format)
+        case 7: try decoder.decodeSingularMessageField(value: &_storage._items)
+        case 8: try decoder.decodeSingularStringField(value: &_storage._collectionFormat)
+        case 9: try decoder.decodeSingularMessageField(value: &_storage._default)
+        case 10: try decoder.decodeSingularDoubleField(value: &_storage._maximum)
+        case 11: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum)
+        case 12: try decoder.decodeSingularDoubleField(value: &_storage._minimum)
+        case 13: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum)
+        case 14: try decoder.decodeSingularInt64Field(value: &_storage._maxLength)
+        case 15: try decoder.decodeSingularInt64Field(value: &_storage._minLength)
+        case 16: try decoder.decodeSingularStringField(value: &_storage._pattern)
+        case 17: try decoder.decodeSingularInt64Field(value: &_storage._maxItems)
+        case 18: try decoder.decodeSingularInt64Field(value: &_storage._minItems)
+        case 19: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems)
+        case 20: try decoder.decodeRepeatedMessageField(value: &_storage._enum)
+        case 21: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf)
+        case 22: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 5) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 6) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 8) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 10) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 11) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 12) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 13) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 14) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 15) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 16) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 17) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 18) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 19) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 20) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 21) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 22) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Headers: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Headers" + + public var additionalProperties: [Openapi_V2_NamedHeader] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+/// General information about the API.
+public struct Openapi_V2_Info: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Info"
+
+  /// A unique and precise title of the API.
+  public var title: String {
+    get {return _storage._title}
+    set {_uniqueStorage()._title = newValue}
+  }
+
+  /// A semantic version number of the API.
+  public var version: String {
+    get {return _storage._version}
+    set {_uniqueStorage()._version = newValue}
+  }
+
+  /// A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed.
+  public var description_p: String {
+    get {return _storage._description_p}
+    set {_uniqueStorage()._description_p = newValue}
+  }
+
+  /// The terms of service for the API.
+  public var termsOfService: String {
+    get {return _storage._termsOfService}
+    set {_uniqueStorage()._termsOfService = newValue}
+  }
+
+  public var contact: Openapi_V2_Contact {
+    get {return _storage._contact ?? Openapi_V2_Contact()}
+    set {_uniqueStorage()._contact = newValue}
+  }
+  /// Returns true if `contact` has been explicitly set.
+  public var hasContact: Bool {return _storage._contact != nil}
+  /// Clears the value of `contact`. Subsequent reads from it will return its default value.
+  public mutating func clearContact() {_storage._contact = nil}
+
+  public var license: Openapi_V2_License {
+    get {return _storage._license ?? Openapi_V2_License()}
+    set {_uniqueStorage()._license = newValue}
+  }
+  /// Returns true if `license` has been explicitly set.
+  public var hasLicense: Bool {return _storage._license != nil}
+  /// Clears the value of `license`. Subsequent reads from it will return its default value.
+  public mutating func clearLicense() {_storage._license = nil}
+
+  public var vendorExtension: [Openapi_V2_NamedAny] {
+    get {return _storage._vendorExtension}
+    set {_uniqueStorage()._vendorExtension = newValue}
+  }
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._title) + case 2: try decoder.decodeSingularStringField(value: &_storage._version) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._termsOfService) + case 5: try decoder.decodeSingularMessageField(value: &_storage._contact) + case 6: try decoder.decodeSingularMessageField(value: &_storage._license) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 1) + } + if !_storage._version.isEmpty { + try visitor.visitSingularStringField(value: _storage._version, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._termsOfService.isEmpty { + try visitor.visitSingularStringField(value: _storage._termsOfService, fieldNumber: 4) + } + if let v = _storage._contact { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._license { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_ItemsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ItemsItem" + + public var schema: [Openapi_V2_Schema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.schema) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.schema.isEmpty { + try visitor.visitRepeatedMessageField(value: self.schema, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_JsonReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".JsonReference" + + public var ref: String = String() + + public var description_p: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + case 2: try decoder.decodeSingularStringField(value: &self.description_p) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_License: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".License" + + /// The name of the license type. It's encouraged to use an OSI compatible license. + public var name: String = String() + + /// The URL pointing to the license. + public var url: String = String() + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +public struct Openapi_V2_NamedAny: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedAny" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Any { + get {return _storage._value ?? Openapi_V2_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +public struct Openapi_V2_NamedHeader: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedHeader" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Header { + get {return _storage._value ?? Openapi_V2_Header()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +public struct Openapi_V2_NamedParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedParameter" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Parameter { + get {return _storage._value ?? Openapi_V2_Parameter()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +public struct Openapi_V2_NamedPathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedPathItem" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_PathItem { + get {return _storage._value ?? Openapi_V2_PathItem()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +public struct Openapi_V2_NamedResponse: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResponse" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Response { + get {return _storage._value ?? Openapi_V2_Response()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. +public struct Openapi_V2_NamedResponseValue: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResponseValue" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_ResponseValue { + get {return _storage._value ?? Openapi_V2_ResponseValue()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +public struct Openapi_V2_NamedSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSchema" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_Schema { + get {return _storage._value ?? Openapi_V2_Schema()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. +public struct Openapi_V2_NamedSecurityDefinitionsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSecurityDefinitionsItem" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_SecurityDefinitionsItem { + get {return _storage._value ?? Openapi_V2_SecurityDefinitionsItem()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. 
Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +public struct Openapi_V2_NamedString: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedString" + + /// Map key + public var name: String = String() + + /// Mapped value + public var value: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.value.isEmpty { + try visitor.visitSingularStringField(value: self.value, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. 
+public struct Openapi_V2_NamedStringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedStringArray" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V2_StringArray { + get {return _storage._value ?? Openapi_V2_StringArray()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_NonBodyParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NonBodyParameter" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var headerParameterSubSchema: Openapi_V2_HeaderParameterSubSchema { + get { + if case .headerParameterSubSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_HeaderParameterSubSchema() + } + set {_uniqueStorage()._oneof = .headerParameterSubSchema(newValue)} + } + + public var formDataParameterSubSchema: Openapi_V2_FormDataParameterSubSchema { + get { + if case .formDataParameterSubSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_FormDataParameterSubSchema() + } + set {_uniqueStorage()._oneof = .formDataParameterSubSchema(newValue)} + } + + public var queryParameterSubSchema: Openapi_V2_QueryParameterSubSchema { + get { + if case .queryParameterSubSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_QueryParameterSubSchema() + } + set {_uniqueStorage()._oneof = .queryParameterSubSchema(newValue)} + } + + public var pathParameterSubSchema: Openapi_V2_PathParameterSubSchema { + get { + if case .pathParameterSubSchema(let v)? 
= _storage._oneof {return v} + return Openapi_V2_PathParameterSubSchema() + } + set {_uniqueStorage()._oneof = .pathParameterSubSchema(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case headerParameterSubSchema(Openapi_V2_HeaderParameterSubSchema) + case formDataParameterSubSchema(Openapi_V2_FormDataParameterSubSchema) + case queryParameterSubSchema(Openapi_V2_QueryParameterSubSchema) + case pathParameterSubSchema(Openapi_V2_PathParameterSubSchema) + + public static func ==(lhs: Openapi_V2_NonBodyParameter.OneOf_Oneof, rhs: Openapi_V2_NonBodyParameter.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.headerParameterSubSchema(let l), .headerParameterSubSchema(let r)): return l == r + case (.formDataParameterSubSchema(let l), .formDataParameterSubSchema(let r)): return l == r + case (.queryParameterSubSchema(let l), .queryParameterSubSchema(let r)): return l == r + case (.pathParameterSubSchema(let l), .pathParameterSubSchema(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_HeaderParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .headerParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .headerParameterSubSchema(v)} + case 2: + var v: Openapi_V2_FormDataParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .formDataParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .formDataParameterSubSchema(v)} + case 3: + var v: Openapi_V2_QueryParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .queryParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .queryParameterSubSchema(v)} + case 4: + var v: Openapi_V2_PathParameterSubSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .pathParameterSubSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .pathParameterSubSchema(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      switch _storage._oneof {
+      case .headerParameterSubSchema(let v)?:
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
+      case .formDataParameterSubSchema(let v)?:
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
+      case .queryParameterSubSchema(let v)?:
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
+      case .pathParameterSubSchema(let v)?:
+        try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
+      case nil: break
+      }
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+
+  fileprivate var _storage = _StorageClass.defaultInstance
+}
+
+public struct Openapi_V2_Oauth2AccessCodeSecurity: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Oauth2AccessCodeSecurity"
+
+  public var type: String {
+    get {return _storage._type}
+    set {_uniqueStorage()._type = newValue}
+  }
+
+  public var flow: String {
+    get {return _storage._flow}
+    set {_uniqueStorage()._flow = newValue}
+  }
+
+  public var scopes: Openapi_V2_Oauth2Scopes {
+    get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()}
+    set {_uniqueStorage()._scopes = newValue}
+  }
+  /// Returns true if `scopes` has been explicitly set.
+  public var hasScopes: Bool {return _storage._scopes != nil}
+  /// Clears the value of `scopes`. Subsequent reads from it will return its default value.
+  public mutating func clearScopes() {_storage._scopes = nil}
+
+  public var authorizationURL: String {
+    get {return _storage._authorizationURL}
+    set {_uniqueStorage()._authorizationURL = newValue}
+  }
+
+  public var tokenURL: String {
+    get {return _storage._tokenURL}
+    set {_uniqueStorage()._tokenURL = newValue}
+  }
+
+  public var description_p: String {
+    get {return _storage._description_p}
+    set {_uniqueStorage()._description_p = newValue}
+  }
+
+  public var vendorExtension: [Openapi_V2_NamedAny] {
+    get {return _storage._vendorExtension}
+    set {_uniqueStorage()._vendorExtension = newValue}
+  }
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularStringField(value: &_storage._type)
+        case 2: try decoder.decodeSingularStringField(value: &_storage._flow)
+        case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes)
+        case 4: try decoder.decodeSingularStringField(value: &_storage._authorizationURL)
+        case 5: try decoder.decodeSingularStringField(value: &_storage._tokenURL)
+        case 6: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 7: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._authorizationURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._authorizationURL, fieldNumber: 4) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 5) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 6) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2ApplicationSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2ApplicationSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2ImplicitSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2ImplicitSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var authorizationURL: String { + get {return _storage._authorizationURL} + set {_uniqueStorage()._authorizationURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._authorizationURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._authorizationURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._authorizationURL, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2PasswordSecurity: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2PasswordSecurity" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var flow: String { + get {return _storage._flow} + set {_uniqueStorage()._flow = newValue} + } + + public var scopes: Openapi_V2_Oauth2Scopes { + get {return _storage._scopes ?? Openapi_V2_Oauth2Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._flow) + case 3: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 4: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._flow.isEmpty { + try visitor.visitSingularStringField(value: _storage._flow, fieldNumber: 2) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Oauth2Scopes: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2Scopes" + + public var additionalProperties: [Openapi_V2_NamedString] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Operation: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Operation" + + public var tags: [String] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + /// A brief summary of the operation. + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + /// A longer description of the operation, GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + /// A unique identifier of the operation. + public var operationID: String { + get {return _storage._operationID} + set {_uniqueStorage()._operationID = newValue} + } + + /// A list of MIME types the API can produce. + public var produces: [String] { + get {return _storage._produces} + set {_uniqueStorage()._produces = newValue} + } + + /// A list of MIME types the API can consume. + public var consumes: [String] { + get {return _storage._consumes} + set {_uniqueStorage()._consumes = newValue} + } + + /// The parameters needed to send a valid API call. + public var parameters: [Openapi_V2_ParametersItem] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var responses: Openapi_V2_Responses { + get {return _storage._responses ?? Openapi_V2_Responses()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + /// The transfer protocol of the API. + public var schemes: [String] { + get {return _storage._schemes} + set {_uniqueStorage()._schemes = newValue} + } + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var security: [Openapi_V2_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &_storage._tags) + case 2: try decoder.decodeSingularStringField(value: &_storage._summary) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 5: try decoder.decodeSingularStringField(value: &_storage._operationID) + case 6: try decoder.decodeRepeatedStringField(value: &_storage._produces) + case 7: try decoder.decodeRepeatedStringField(value: &_storage._consumes) + case 8: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 9: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 10: try decoder.decodeRepeatedStringField(value: &_storage._schemes) + case 11: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 13: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._tags.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._tags, fieldNumber: 1) + } + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._operationID.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationID, fieldNumber: 5) + } + if !_storage._produces.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._produces, fieldNumber: 6) + } + if !_storage._consumes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._consumes, fieldNumber: 7) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 8) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._schemes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._schemes, fieldNumber: 10) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 11) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 12) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 13) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + public var oneof: 
OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var bodyParameter: Openapi_V2_BodyParameter { + get { + if case .bodyParameter(let v)? = _storage._oneof {return v} + return Openapi_V2_BodyParameter() + } + set {_uniqueStorage()._oneof = .bodyParameter(newValue)} + } + + public var nonBodyParameter: Openapi_V2_NonBodyParameter { + get { + if case .nonBodyParameter(let v)? = _storage._oneof {return v} + return Openapi_V2_NonBodyParameter() + } + set {_uniqueStorage()._oneof = .nonBodyParameter(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case bodyParameter(Openapi_V2_BodyParameter) + case nonBodyParameter(Openapi_V2_NonBodyParameter) + + public static func ==(lhs: Openapi_V2_Parameter.OneOf_Oneof, rhs: Openapi_V2_Parameter.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.bodyParameter(let l), .bodyParameter(let r)): return l == r + case (.nonBodyParameter(let l), .nonBodyParameter(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_BodyParameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .bodyParameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .bodyParameter(v)} + case 2: + var v: Openapi_V2_NonBodyParameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .nonBodyParameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .nonBodyParameter(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .bodyParameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .nonBodyParameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// One or more JSON representations for parameters +public struct Openapi_V2_ParameterDefinitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParameterDefinitions" + + public var additionalProperties: [Openapi_V2_NamedParameter] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_ParametersItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParametersItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var parameter: Openapi_V2_Parameter { + get { + if case .parameter(let v)? = _storage._oneof {return v} + return Openapi_V2_Parameter() + } + set {_uniqueStorage()._oneof = .parameter(newValue)} + } + + public var jsonReference: Openapi_V2_JsonReference { + get { + if case .jsonReference(let v)? = _storage._oneof {return v} + return Openapi_V2_JsonReference() + } + set {_uniqueStorage()._oneof = .jsonReference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case parameter(Openapi_V2_Parameter) + case jsonReference(Openapi_V2_JsonReference) + + public static func ==(lhs: Openapi_V2_ParametersItem.OneOf_Oneof, rhs: Openapi_V2_ParametersItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.parameter(let l), .parameter(let r)): return l == r + case (.jsonReference(let l), .jsonReference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Parameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .parameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .parameter(v)} + case 2: + var v: Openapi_V2_JsonReference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .jsonReference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .jsonReference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .parameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .jsonReference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_PathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PathItem" + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var get: Openapi_V2_Operation { + get {return _storage._get ?? Openapi_V2_Operation()} + set {_uniqueStorage()._get = newValue} + } + /// Returns true if `get` has been explicitly set. + public var hasGet: Bool {return _storage._get != nil} + /// Clears the value of `get`. Subsequent reads from it will return its default value. + public mutating func clearGet() {_storage._get = nil} + + public var put: Openapi_V2_Operation { + get {return _storage._put ?? Openapi_V2_Operation()} + set {_uniqueStorage()._put = newValue} + } + /// Returns true if `put` has been explicitly set. + public var hasPut: Bool {return _storage._put != nil} + /// Clears the value of `put`. Subsequent reads from it will return its default value. + public mutating func clearPut() {_storage._put = nil} + + public var post: Openapi_V2_Operation { + get {return _storage._post ?? Openapi_V2_Operation()} + set {_uniqueStorage()._post = newValue} + } + /// Returns true if `post` has been explicitly set. + public var hasPost: Bool {return _storage._post != nil} + /// Clears the value of `post`. Subsequent reads from it will return its default value. + public mutating func clearPost() {_storage._post = nil} + + public var delete: Openapi_V2_Operation { + get {return _storage._delete ?? Openapi_V2_Operation()} + set {_uniqueStorage()._delete = newValue} + } + /// Returns true if `delete` has been explicitly set. + public var hasDelete: Bool {return _storage._delete != nil} + /// Clears the value of `delete`. Subsequent reads from it will return its default value. + public mutating func clearDelete() {_storage._delete = nil} + + public var options: Openapi_V2_Operation { + get {return _storage._options ?? Openapi_V2_Operation()} + set {_uniqueStorage()._options = newValue} + } + /// Returns true if `options` has been explicitly set. + public var hasOptions: Bool {return _storage._options != nil} + /// Clears the value of `options`. Subsequent reads from it will return its default value. + public mutating func clearOptions() {_storage._options = nil} + + public var head: Openapi_V2_Operation { + get {return _storage._head ?? Openapi_V2_Operation()} + set {_uniqueStorage()._head = newValue} + } + /// Returns true if `head` has been explicitly set. + public var hasHead: Bool {return _storage._head != nil} + /// Clears the value of `head`. Subsequent reads from it will return its default value. + public mutating func clearHead() {_storage._head = nil} + + public var patch: Openapi_V2_Operation { + get {return _storage._patch ?? Openapi_V2_Operation()} + set {_uniqueStorage()._patch = newValue} + } + /// Returns true if `patch` has been explicitly set. + public var hasPatch: Bool {return _storage._patch != nil} + /// Clears the value of `patch`. 
Subsequent reads from it will return its default value. + public mutating func clearPatch() {_storage._patch = nil} + + /// The parameters needed to send a valid API call. + public var parameters: [Openapi_V2_ParametersItem] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._ref) + case 2: try decoder.decodeSingularMessageField(value: &_storage._get) + case 3: try decoder.decodeSingularMessageField(value: &_storage._put) + case 4: try decoder.decodeSingularMessageField(value: &_storage._post) + case 5: try decoder.decodeSingularMessageField(value: &_storage._delete) + case 6: try decoder.decodeSingularMessageField(value: &_storage._options) + case 7: try decoder.decodeSingularMessageField(value: &_storage._head) + case 8: try decoder.decodeSingularMessageField(value: &_storage._patch) + case 9: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 10: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 1) + } + if let v = _storage._get { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._put { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._post { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._delete { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._options { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._head { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._patch { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 9) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 10) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_PathParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PathParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._required) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularStringField(value: &_storage._type) + case 6: try decoder.decodeSingularStringField(value: &_storage._format) + case 7: try decoder.decodeSingularMessageField(value: &_storage._items) + case 8: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 9: try decoder.decodeSingularMessageField(value: &_storage._default) + case 10: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 11: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 12: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 13: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 16: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 17: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 19: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 20: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 21: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 22: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 5) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 6) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 8) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 10) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 11) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 12) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 13) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 14) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 15) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 16) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 17) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 18) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 19) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 20) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 21) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 22) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Relative paths to the individual endpoints. They must be relative to the 'basePath'. +public struct Openapi_V2_Paths: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Paths" + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var path: [Openapi_V2_NamedPathItem] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension) + case 2: try decoder.decodeRepeatedMessageField(value: &self.path) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitRepeatedMessageField(value: self.path, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_PrimitivesItems: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PrimitivesItems" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._format) + case 3: try decoder.decodeSingularMessageField(value: &_storage._items) + case 4: try decoder.decodeSingularStringField(value: &_storage._collectionFormat) + case 5: try decoder.decodeSingularMessageField(value: &_storage._default) + case 6: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 7: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 8: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 9: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 10: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 11: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 12: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 13: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 14: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 15: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 16: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 17: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 18: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 2) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 4) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 6) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 7) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 8) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 9) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 10) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 11) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 12) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 13) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 14) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 15) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 16) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 17) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 18) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Properties: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Properties" + + public var additionalProperties: [Openapi_V2_NamedSchema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_QueryParameterSubSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".QueryParameterSubSchema" + + /// Determines whether or not this parameter is required or optional. + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + /// Determines the location of the parameter. + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + /// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + /// The name of the parameter. + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// allows sending a parameter by name only or with an empty value. + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var items: Openapi_V2_PrimitivesItems { + get {return _storage._items ?? Openapi_V2_PrimitivesItems()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var collectionFormat: String { + get {return _storage._collectionFormat} + set {_uniqueStorage()._collectionFormat = newValue} + } + + public var `default`: Openapi_V2_Any { + get {return _storage._default ?? Openapi_V2_Any()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. 
+ public mutating func clearDefault() {_storage._default = nil} + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularBoolField(value: &_storage._required)
+        case 2: try decoder.decodeSingularStringField(value: &_storage._in)
+        case 3: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 4: try decoder.decodeSingularStringField(value: &_storage._name)
+        case 5: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue)
+        case 6: try decoder.decodeSingularStringField(value: &_storage._type)
+        case 7: try decoder.decodeSingularStringField(value: &_storage._format)
+        case 8: try decoder.decodeSingularMessageField(value: &_storage._items)
+        case 9: try decoder.decodeSingularStringField(value: &_storage._collectionFormat)
+        case 10: try decoder.decodeSingularMessageField(value: &_storage._default)
+        case 11: try decoder.decodeSingularDoubleField(value: &_storage._maximum)
+        case 12: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum)
+        case 13: try decoder.decodeSingularDoubleField(value: &_storage._minimum)
+        case 14: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum)
+        case 15: try decoder.decodeSingularInt64Field(value: &_storage._maxLength)
+        case 16: try decoder.decodeSingularInt64Field(value: &_storage._minLength)
+        case 17: try decoder.decodeSingularStringField(value: &_storage._pattern)
+        case 18: try decoder.decodeSingularInt64Field(value: &_storage._maxItems)
+        case 19: try decoder.decodeSingularInt64Field(value: &_storage._minItems)
+        case 20: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems)
+        case 21: try decoder.decodeRepeatedMessageField(value: &_storage._enum)
+        case 22: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf)
+        case 23: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 5) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 6) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 7) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._collectionFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._collectionFormat, fieldNumber: 9) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 11) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 12) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 13) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 14) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 15) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 16) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 17) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 18) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 19) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 20) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 21) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 22) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 23) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var schema: Openapi_V2_SchemaItem { + get {return _storage._schema ?? Openapi_V2_SchemaItem()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. 
+  public var hasSchema: Bool {return _storage._schema != nil}
+  /// Clears the value of `schema`. Subsequent reads from it will return its default value.
+  public mutating func clearSchema() {_storage._schema = nil}
+
+  public var headers: Openapi_V2_Headers {
+    get {return _storage._headers ?? Openapi_V2_Headers()}
+    set {_uniqueStorage()._headers = newValue}
+  }
+  /// Returns true if `headers` has been explicitly set.
+  public var hasHeaders: Bool {return _storage._headers != nil}
+  /// Clears the value of `headers`. Subsequent reads from it will return its default value.
+  public mutating func clearHeaders() {_storage._headers = nil}
+
+  public var examples: Openapi_V2_Examples {
+    get {return _storage._examples ?? Openapi_V2_Examples()}
+    set {_uniqueStorage()._examples = newValue}
+  }
+  /// Returns true if `examples` has been explicitly set.
+  public var hasExamples: Bool {return _storage._examples != nil}
+  /// Clears the value of `examples`. Subsequent reads from it will return its default value.
+  public mutating func clearExamples() {_storage._examples = nil}
+
+  public var vendorExtension: [Openapi_V2_NamedAny] {
+    get {return _storage._vendorExtension}
+    set {_uniqueStorage()._vendorExtension = newValue}
+  }
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 2: try decoder.decodeSingularMessageField(value: &_storage._schema)
+        case 3: try decoder.decodeSingularMessageField(value: &_storage._headers)
+        case 4: try decoder.decodeSingularMessageField(value: &_storage._examples)
+        case 5: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// One or more JSON representations for parameters +public struct Openapi_V2_ResponseDefinitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponseDefinitions" + + public var additionalProperties: [Openapi_V2_NamedResponse] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_ResponseValue: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponseValue" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var response: Openapi_V2_Response { + get { + if case .response(let v)? = _storage._oneof {return v} + return Openapi_V2_Response() + } + set {_uniqueStorage()._oneof = .response(newValue)} + } + + public var jsonReference: Openapi_V2_JsonReference { + get { + if case .jsonReference(let v)? 
= _storage._oneof {return v} + return Openapi_V2_JsonReference() + } + set {_uniqueStorage()._oneof = .jsonReference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case response(Openapi_V2_Response) + case jsonReference(Openapi_V2_JsonReference) + + public static func ==(lhs: Openapi_V2_ResponseValue.OneOf_Oneof, rhs: Openapi_V2_ResponseValue.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.response(let l), .response(let r)): return l == r + case (.jsonReference(let l), .jsonReference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Response? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .response(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .response(v)} + case 2: + var v: Openapi_V2_JsonReference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .jsonReference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .jsonReference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .response(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .jsonReference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Response objects names can either be any valid HTTP status code or 'default'. +public struct Openapi_V2_Responses: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Responses" + + public var responseCode: [Openapi_V2_NamedResponseValue] = [] + + public var vendorExtension: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.responseCode)
+      case 2: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.responseCode.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.responseCode, fieldNumber: 1)
+    }
+    if !self.vendorExtension.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 2)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+/// A deterministic version of a JSON Schema object.
+public struct Openapi_V2_Schema: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Schema"
+
+  public var ref: String {
+    get {return _storage._ref}
+    set {_uniqueStorage()._ref = newValue}
+  }
+
+  public var format: String {
+    get {return _storage._format}
+    set {_uniqueStorage()._format = newValue}
+  }
+
+  public var title: String {
+    get {return _storage._title}
+    set {_uniqueStorage()._title = newValue}
+  }
+
+  public var description_p: String {
+    get {return _storage._description_p}
+    set {_uniqueStorage()._description_p = newValue}
+  }
+
+  public var `default`: Openapi_V2_Any {
+    get {return _storage._default ?? Openapi_V2_Any()}
+    set {_uniqueStorage()._default = newValue}
+  }
+  /// Returns true if ``default`` has been explicitly set.
+  public var hasDefault: Bool {return _storage._default != nil}
+  /// Clears the value of ``default``. Subsequent reads from it will return its default value.
+ public mutating func clearDefault() {_storage._default = nil} + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var maxProperties: Int64 { + get {return _storage._maxProperties} + set {_uniqueStorage()._maxProperties = newValue} + } + + public var minProperties: Int64 { + get {return _storage._minProperties} + set {_uniqueStorage()._minProperties = newValue} + } + + public var required: [String] { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var `enum`: [Openapi_V2_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var additionalProperties: Openapi_V2_AdditionalPropertiesItem { + get {return _storage._additionalProperties ?? Openapi_V2_AdditionalPropertiesItem()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. + public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var type: Openapi_V2_TypeItem { + get {return _storage._type ?? Openapi_V2_TypeItem()} + set {_uniqueStorage()._type = newValue} + } + /// Returns true if `type` has been explicitly set. + public var hasType: Bool {return _storage._type != nil} + /// Clears the value of `type`. Subsequent reads from it will return its default value. + public mutating func clearType() {_storage._type = nil} + + public var items: Openapi_V2_ItemsItem { + get {return _storage._items ?? Openapi_V2_ItemsItem()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var allOf: [Openapi_V2_Schema] { + get {return _storage._allOf} + set {_uniqueStorage()._allOf = newValue} + } + + public var properties: Openapi_V2_Properties { + get {return _storage._properties ?? 
Openapi_V2_Properties()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var discriminator: String { + get {return _storage._discriminator} + set {_uniqueStorage()._discriminator = newValue} + } + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var xml: Openapi_V2_Xml { + get {return _storage._xml ?? Openapi_V2_Xml()} + set {_uniqueStorage()._xml = newValue} + } + /// Returns true if `xml` has been explicitly set. + public var hasXml: Bool {return _storage._xml != nil} + /// Clears the value of `xml`. Subsequent reads from it will return its default value. + public mutating func clearXml() {_storage._xml = nil} + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var example: Openapi_V2_Any { + get {return _storage._example ?? Openapi_V2_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    _ = _uniqueStorage()
+    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
+      while let fieldNumber = try decoder.nextFieldNumber() {
+        switch fieldNumber {
+        case 1: try decoder.decodeSingularStringField(value: &_storage._ref)
+        case 2: try decoder.decodeSingularStringField(value: &_storage._format)
+        case 3: try decoder.decodeSingularStringField(value: &_storage._title)
+        case 4: try decoder.decodeSingularStringField(value: &_storage._description_p)
+        case 5: try decoder.decodeSingularMessageField(value: &_storage._default)
+        case 6: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf)
+        case 7: try decoder.decodeSingularDoubleField(value: &_storage._maximum)
+        case 8: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum)
+        case 9: try decoder.decodeSingularDoubleField(value: &_storage._minimum)
+        case 10: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum)
+        case 11: try decoder.decodeSingularInt64Field(value: &_storage._maxLength)
+        case 12: try decoder.decodeSingularInt64Field(value: &_storage._minLength)
+        case 13: try decoder.decodeSingularStringField(value: &_storage._pattern)
+        case 14: try decoder.decodeSingularInt64Field(value: &_storage._maxItems)
+        case 15: try decoder.decodeSingularInt64Field(value: &_storage._minItems)
+        case 16: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems)
+        case 17: try decoder.decodeSingularInt64Field(value: &_storage._maxProperties)
+        case 18: try decoder.decodeSingularInt64Field(value: &_storage._minProperties)
+        case 19: try decoder.decodeRepeatedStringField(value: &_storage._required)
+        case 20: try decoder.decodeRepeatedMessageField(value: &_storage._enum)
+        case 21: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties)
+        case 22: try decoder.decodeSingularMessageField(value: &_storage._type)
+        case 23: try decoder.decodeSingularMessageField(value: &_storage._items)
+        case 24: try decoder.decodeRepeatedMessageField(value: &_storage._allOf)
+        case 25: try decoder.decodeSingularMessageField(value: &_storage._properties)
+        case 26: try decoder.decodeSingularStringField(value: &_storage._discriminator)
+        case 27: try decoder.decodeSingularBoolField(value: &_storage._readOnly)
+        case 28: try decoder.decodeSingularMessageField(value: &_storage._xml)
+        case 29: try decoder.decodeSingularMessageField(value: &_storage._externalDocs)
+        case 30: try decoder.decodeSingularMessageField(value: &_storage._example)
+        case 31: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension)
+        default: break
+        }
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 1) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 2) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 3) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 4) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 6) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 7) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 8) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 9) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 10) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 11) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 12) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 13) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 14) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 15) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 16) + } + if _storage._maxProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxProperties, fieldNumber: 17) + } + if _storage._minProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._minProperties, fieldNumber: 18) + } + if !_storage._required.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._required, fieldNumber: 19) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 20) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 21) + } + if let v = _storage._type { + try visitor.visitSingularMessageField(value: v, fieldNumber: 22) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 23) + } + if !_storage._allOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._allOf, fieldNumber: 24) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 25) + } + if !_storage._discriminator.isEmpty { + try visitor.visitSingularStringField(value: _storage._discriminator, fieldNumber: 26) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 27) + } + if let v = _storage._xml { + try visitor.visitSingularMessageField(value: v, fieldNumber: 28) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 29) + } + if let v = 
_storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 30) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 31) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_SchemaItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SchemaItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schema: Openapi_V2_Schema { + get { + if case .schema(let v)? = _storage._oneof {return v} + return Openapi_V2_Schema() + } + set {_uniqueStorage()._oneof = .schema(newValue)} + } + + public var fileSchema: Openapi_V2_FileSchema { + get { + if case .fileSchema(let v)? = _storage._oneof {return v} + return Openapi_V2_FileSchema() + } + set {_uniqueStorage()._oneof = .fileSchema(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schema(Openapi_V2_Schema) + case fileSchema(Openapi_V2_FileSchema) + + public static func ==(lhs: Openapi_V2_SchemaItem.OneOf_Oneof, rhs: Openapi_V2_SchemaItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schema(let l), .schema(let r)): return l == r + case (.fileSchema(let l), .fileSchema(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_Schema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schema(v)} + case 2: + var v: Openapi_V2_FileSchema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .fileSchema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .fileSchema(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .fileSchema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_SecurityDefinitions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityDefinitions" + + public var additionalProperties: [Openapi_V2_NamedSecurityDefinitionsItem] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_SecurityDefinitionsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityDefinitionsItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var basicAuthenticationSecurity: Openapi_V2_BasicAuthenticationSecurity { + get { + if case .basicAuthenticationSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_BasicAuthenticationSecurity() + } + set {_uniqueStorage()._oneof = .basicAuthenticationSecurity(newValue)} + } + + public var apiKeySecurity: Openapi_V2_ApiKeySecurity { + get { + if case .apiKeySecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_ApiKeySecurity() + } + set {_uniqueStorage()._oneof = .apiKeySecurity(newValue)} + } + + public var oauth2ImplicitSecurity: Openapi_V2_Oauth2ImplicitSecurity { + get { + if case .oauth2ImplicitSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_Oauth2ImplicitSecurity() + } + set {_uniqueStorage()._oneof = .oauth2ImplicitSecurity(newValue)} + } + + public var oauth2PasswordSecurity: Openapi_V2_Oauth2PasswordSecurity { + get { + if case .oauth2PasswordSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_Oauth2PasswordSecurity() + } + set {_uniqueStorage()._oneof = .oauth2PasswordSecurity(newValue)} + } + + public var oauth2ApplicationSecurity: Openapi_V2_Oauth2ApplicationSecurity { + get { + if case .oauth2ApplicationSecurity(let v)? 
= _storage._oneof {return v} + return Openapi_V2_Oauth2ApplicationSecurity() + } + set {_uniqueStorage()._oneof = .oauth2ApplicationSecurity(newValue)} + } + + public var oauth2AccessCodeSecurity: Openapi_V2_Oauth2AccessCodeSecurity { + get { + if case .oauth2AccessCodeSecurity(let v)? = _storage._oneof {return v} + return Openapi_V2_Oauth2AccessCodeSecurity() + } + set {_uniqueStorage()._oneof = .oauth2AccessCodeSecurity(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case basicAuthenticationSecurity(Openapi_V2_BasicAuthenticationSecurity) + case apiKeySecurity(Openapi_V2_ApiKeySecurity) + case oauth2ImplicitSecurity(Openapi_V2_Oauth2ImplicitSecurity) + case oauth2PasswordSecurity(Openapi_V2_Oauth2PasswordSecurity) + case oauth2ApplicationSecurity(Openapi_V2_Oauth2ApplicationSecurity) + case oauth2AccessCodeSecurity(Openapi_V2_Oauth2AccessCodeSecurity) + + public static func ==(lhs: Openapi_V2_SecurityDefinitionsItem.OneOf_Oneof, rhs: Openapi_V2_SecurityDefinitionsItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.basicAuthenticationSecurity(let l), .basicAuthenticationSecurity(let r)): return l == r + case (.apiKeySecurity(let l), .apiKeySecurity(let r)): return l == r + case (.oauth2ImplicitSecurity(let l), .oauth2ImplicitSecurity(let r)): return l == r + case (.oauth2PasswordSecurity(let l), .oauth2PasswordSecurity(let r)): return l == r + case (.oauth2ApplicationSecurity(let l), .oauth2ApplicationSecurity(let r)): return l == r + case (.oauth2AccessCodeSecurity(let l), .oauth2AccessCodeSecurity(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V2_BasicAuthenticationSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .basicAuthenticationSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .basicAuthenticationSecurity(v)} + case 2: + var v: Openapi_V2_ApiKeySecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .apiKeySecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .apiKeySecurity(v)} + case 3: + var v: Openapi_V2_Oauth2ImplicitSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2ImplicitSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2ImplicitSecurity(v)} + case 4: + var v: Openapi_V2_Oauth2PasswordSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2PasswordSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2PasswordSecurity(v)} + case 5: + var v: Openapi_V2_Oauth2ApplicationSecurity? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2ApplicationSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2ApplicationSecurity(v)} + case 6: + var v: Openapi_V2_Oauth2AccessCodeSecurity? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .oauth2AccessCodeSecurity(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .oauth2AccessCodeSecurity(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .basicAuthenticationSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .apiKeySecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case .oauth2ImplicitSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + case .oauth2PasswordSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + case .oauth2ApplicationSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + case .oauth2AccessCodeSecurity(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_SecurityRequirement: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityRequirement" + + public var additionalProperties: [Openapi_V2_NamedStringArray] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_StringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".StringArray" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.value.isEmpty { + try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V2_Tag: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Tag" + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V2_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V2_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var vendorExtension: [Openapi_V2_NamedAny] { + get {return _storage._vendorExtension} + set {_uniqueStorage()._vendorExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._vendorExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._vendorExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._vendorExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V2_TypeItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".TypeItem" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.value.isEmpty { + try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Any property starting with x- is valid. +public struct Openapi_V2_VendorExtension: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".VendorExtension" + + public var additionalProperties: [Openapi_V2_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.additionalProperties.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+public struct Openapi_V2_Xml: SwiftProtobuf.Message {
+  public static let protoMessageName: String = _protobuf_package + ".Xml"
+
+  public var name: String = String()
+
+  public var namespace: String = String()
+
+  public var prefix: String = String()
+
+  public var attribute: Bool = false
+
+  public var wrapped: Bool = false
+
+  public var vendorExtension: [Openapi_V2_NamedAny] = []
+
+  public var unknownFields = SwiftProtobuf.UnknownStorage()
+
+  public init() {}
+
+  /// Used by the decoding initializers in the SwiftProtobuf library, not generally
+  /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding
+  /// initializers are defined in the SwiftProtobuf library. See the Message and
+  /// Message+*Additions` files.
+  public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
+    while let fieldNumber = try decoder.nextFieldNumber() {
+      switch fieldNumber {
+      case 1: try decoder.decodeSingularStringField(value: &self.name)
+      case 2: try decoder.decodeSingularStringField(value: &self.namespace)
+      case 3: try decoder.decodeSingularStringField(value: &self.prefix)
+      case 4: try decoder.decodeSingularBoolField(value: &self.attribute)
+      case 5: try decoder.decodeSingularBoolField(value: &self.wrapped)
+      case 6: try decoder.decodeRepeatedMessageField(value: &self.vendorExtension)
+      default: break
+      }
+    }
+  }
+
+  /// Used by the encoding methods of the SwiftProtobuf library, not generally
+  /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and
+  /// other serializer methods are defined in the SwiftProtobuf library. See the
+  /// `Message` and `Message+*Additions` files.
+  public func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
+    if !self.name.isEmpty {
+      try visitor.visitSingularStringField(value: self.name, fieldNumber: 1)
+    }
+    if !self.namespace.isEmpty {
+      try visitor.visitSingularStringField(value: self.namespace, fieldNumber: 2)
+    }
+    if !self.prefix.isEmpty {
+      try visitor.visitSingularStringField(value: self.prefix, fieldNumber: 3)
+    }
+    if self.attribute != false {
+      try visitor.visitSingularBoolField(value: self.attribute, fieldNumber: 4)
+    }
+    if self.wrapped != false {
+      try visitor.visitSingularBoolField(value: self.wrapped, fieldNumber: 5)
+    }
+    if !self.vendorExtension.isEmpty {
+      try visitor.visitRepeatedMessageField(value: self.vendorExtension, fieldNumber: 6)
+    }
+    try unknownFields.traverse(visitor: &visitor)
+  }
+}
+
+// MARK: - Code below here is support for the SwiftProtobuf runtime.
+ +fileprivate let _protobuf_package = "openapi.v2" + +extension Openapi_V2_AdditionalPropertiesItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .same(proto: "boolean"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_AdditionalPropertiesItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_AdditionalPropertiesItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Any: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + 2: .same(proto: "yaml"), + ] + + fileprivate class _StorageClass { + var _value: SwiftProtobuf.Google_Protobuf_Any? = nil + var _yaml: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _value = source._value + _yaml = source._yaml + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Any) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._value != other_storage._value {return false} + if _storage._yaml != other_storage._yaml {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ApiKeySecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "name"), + 3: .same(proto: "in"), + 4: .same(proto: "description"), + 5: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ApiKeySecurity) -> Bool { + if self.type != other.type {return false} + if self.name != other.name {return false} + if self.`in` != other.`in` {return false} + if self.description_p != other.description_p {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_BasicAuthenticationSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "description"), + 3: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_BasicAuthenticationSecurity) -> Bool { + 
if self.type != other.type {return false} + if self.description_p != other.description_p {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_BodyParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "name"), + 3: .same(proto: "in"), + 4: .same(proto: "required"), + 5: .same(proto: "schema"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _name: String = String() + var _in: String = String() + var _required: Bool = false + var _schema: Openapi_V2_Schema? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _name = source._name + _in = source._in + _required = source._required + _schema = source._schema + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_BodyParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._in != other_storage._in {return false} + if _storage._required != other_storage._required {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Contact: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .same(proto: "email"), + 4: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Contact) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.email != other.email {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Default: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Default) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Definitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func 
_protobuf_generated_isEqualTo(other: Openapi_V2_Definitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Document: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "swagger"), + 2: .same(proto: "info"), + 3: .same(proto: "host"), + 4: .standard(proto: "base_path"), + 5: .same(proto: "schemes"), + 6: .same(proto: "consumes"), + 7: .same(proto: "produces"), + 8: .same(proto: "paths"), + 9: .same(proto: "definitions"), + 10: .same(proto: "parameters"), + 11: .same(proto: "responses"), + 12: .same(proto: "security"), + 13: .standard(proto: "security_definitions"), + 14: .same(proto: "tags"), + 15: .standard(proto: "external_docs"), + 16: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _swagger: String = String() + var _info: Openapi_V2_Info? = nil + var _host: String = String() + var _basePath: String = String() + var _schemes: [String] = [] + var _consumes: [String] = [] + var _produces: [String] = [] + var _paths: Openapi_V2_Paths? = nil + var _definitions: Openapi_V2_Definitions? = nil + var _parameters: Openapi_V2_ParameterDefinitions? = nil + var _responses: Openapi_V2_ResponseDefinitions? = nil + var _security: [Openapi_V2_SecurityRequirement] = [] + var _securityDefinitions: Openapi_V2_SecurityDefinitions? = nil + var _tags: [Openapi_V2_Tag] = [] + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _swagger = source._swagger + _info = source._info + _host = source._host + _basePath = source._basePath + _schemes = source._schemes + _consumes = source._consumes + _produces = source._produces + _paths = source._paths + _definitions = source._definitions + _parameters = source._parameters + _responses = source._responses + _security = source._security + _securityDefinitions = source._securityDefinitions + _tags = source._tags + _externalDocs = source._externalDocs + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Document) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._swagger != other_storage._swagger {return false} + if _storage._info != other_storage._info {return false} + if _storage._host != other_storage._host {return false} + if _storage._basePath != other_storage._basePath {return false} + if _storage._schemes != other_storage._schemes {return false} + if _storage._consumes != other_storage._consumes {return false} + if _storage._produces != other_storage._produces {return false} + if _storage._paths != other_storage._paths {return false} + if _storage._definitions != other_storage._definitions {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._security != other_storage._security {return false} + if _storage._securityDefinitions != 
other_storage._securityDefinitions {return false} + if _storage._tags != other_storage._tags {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Examples: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Examples) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ExternalDocs: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "url"), + 3: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ExternalDocs) -> Bool { + if self.description_p != other.description_p {return false} + if self.url != other.url {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_FileSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "format"), + 2: .same(proto: "title"), + 3: .same(proto: "description"), + 4: .same(proto: "default"), + 5: .same(proto: "required"), + 6: .same(proto: "type"), + 7: .standard(proto: "read_only"), + 8: .standard(proto: "external_docs"), + 9: .same(proto: "example"), + 10: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _format: String = String() + var _title: String = String() + var _description_p: String = String() + var _default: Openapi_V2_Any? = nil + var _required: [String] = [] + var _type: String = String() + var _readOnly: Bool = false + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _example: Openapi_V2_Any? 
= nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _format = source._format + _title = source._title + _description_p = source._description_p + _default = source._default + _required = source._required + _type = source._type + _readOnly = source._readOnly + _externalDocs = source._externalDocs + _example = source._example + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_FileSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._format != other_storage._format {return false} + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._required != other_storage._required {return false} + if _storage._type != other_storage._type {return false} + if _storage._readOnly != other_storage._readOnly {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._example != other_storage._example {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_FormDataParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .standard(proto: "allow_empty_value"), + 6: .same(proto: "type"), + 7: .same(proto: "format"), + 8: .same(proto: "items"), + 9: .standard(proto: "collection_format"), + 10: .same(proto: "default"), + 11: .same(proto: "maximum"), + 12: .standard(proto: "exclusive_maximum"), + 13: .same(proto: "minimum"), + 14: .standard(proto: "exclusive_minimum"), + 15: .standard(proto: "max_length"), + 16: .standard(proto: "min_length"), + 17: .same(proto: "pattern"), + 18: .standard(proto: "max_items"), + 19: .standard(proto: "min_items"), + 20: .standard(proto: "unique_items"), + 21: .same(proto: "enum"), + 22: .standard(proto: "multiple_of"), + 23: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _allowEmptyValue: Bool = false + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _allowEmptyValue = source._allowEmptyValue + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_FormDataParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Header: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + 
public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "format"), + 3: .same(proto: "items"), + 4: .standard(proto: "collection_format"), + 5: .same(proto: "default"), + 6: .same(proto: "maximum"), + 7: .standard(proto: "exclusive_maximum"), + 8: .same(proto: "minimum"), + 9: .standard(proto: "exclusive_minimum"), + 10: .standard(proto: "max_length"), + 11: .standard(proto: "min_length"), + 12: .same(proto: "pattern"), + 13: .standard(proto: "max_items"), + 14: .standard(proto: "min_items"), + 15: .standard(proto: "unique_items"), + 16: .same(proto: "enum"), + 17: .standard(proto: "multiple_of"), + 18: .same(proto: "description"), + 19: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? = nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Header) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return 
false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_HeaderParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .same(proto: "type"), + 6: .same(proto: "format"), + 7: .same(proto: "items"), + 8: .standard(proto: "collection_format"), + 9: .same(proto: "default"), + 10: .same(proto: "maximum"), + 11: .standard(proto: "exclusive_maximum"), + 12: .same(proto: "minimum"), + 13: .standard(proto: "exclusive_minimum"), + 14: .standard(proto: "max_length"), + 15: .standard(proto: "min_length"), + 16: .same(proto: "pattern"), + 17: .standard(proto: "max_items"), + 18: .standard(proto: "min_items"), + 19: .standard(proto: "unique_items"), + 20: .same(proto: "enum"), + 21: .standard(proto: "multiple_of"), + 22: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_HeaderParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Headers: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public 
func _protobuf_generated_isEqualTo(other: Openapi_V2_Headers) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Info: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "title"), + 2: .same(proto: "version"), + 3: .same(proto: "description"), + 4: .standard(proto: "terms_of_service"), + 5: .same(proto: "contact"), + 6: .same(proto: "license"), + 7: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _title: String = String() + var _version: String = String() + var _description_p: String = String() + var _termsOfService: String = String() + var _contact: Openapi_V2_Contact? = nil + var _license: Openapi_V2_License? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _title = source._title + _version = source._version + _description_p = source._description_p + _termsOfService = source._termsOfService + _contact = source._contact + _license = source._license + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Info) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._title != other_storage._title {return false} + if _storage._version != other_storage._version {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._termsOfService != other_storage._termsOfService {return false} + if _storage._contact != other_storage._contact {return false} + if _storage._license != other_storage._license {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ItemsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ItemsItem) -> Bool { + if self.schema != other.schema {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_JsonReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "description"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_JsonReference) -> Bool { + if self.ref != other.ref {return false} + if self.description_p != other.description_p {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_License: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .standard(proto: 
"vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_License) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedAny: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Any? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedAny) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedHeader: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Header? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedHeader) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Parameter? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedPathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_PathItem? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedPathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedResponse: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Response? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedResponse) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedResponseValue: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_ResponseValue? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedResponseValue) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_Schema? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedSecurityDefinitionsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_SecurityDefinitionsItem? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedSecurityDefinitionsItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedString: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedString) -> Bool { + if self.name != other.name {return false} + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NamedStringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V2_StringArray? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NamedStringArray) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_NonBodyParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "header_parameter_sub_schema"), + 2: .standard(proto: "form_data_parameter_sub_schema"), + 3: .standard(proto: "query_parameter_sub_schema"), + 4: .standard(proto: "path_parameter_sub_schema"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_NonBodyParameter.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_NonBodyParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2AccessCodeSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "authorization_url"), + 5: .standard(proto: "token_url"), + 6: .same(proto: "description"), + 7: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _authorizationURL: String = String() + var _tokenURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _authorizationURL = source._authorizationURL + _tokenURL = source._tokenURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2AccessCodeSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._authorizationURL != other_storage._authorizationURL {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2ApplicationSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "token_url"), + 5: .same(proto: "description"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _tokenURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _tokenURL = source._tokenURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2ApplicationSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2ImplicitSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "authorization_url"), + 5: .same(proto: "description"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _authorizationURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _authorizationURL = source._authorizationURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2ImplicitSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._authorizationURL != other_storage._authorizationURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2PasswordSecurity: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "flow"), + 3: .same(proto: "scopes"), + 4: .standard(proto: "token_url"), + 5: .same(proto: "description"), + 6: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _flow: String = String() + var _scopes: Openapi_V2_Oauth2Scopes? 
= nil + var _tokenURL: String = String() + var _description_p: String = String() + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _flow = source._flow + _scopes = source._scopes + _tokenURL = source._tokenURL + _description_p = source._description_p + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2PasswordSecurity) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._flow != other_storage._flow {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Oauth2Scopes: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Oauth2Scopes) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Operation: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "tags"), + 2: .same(proto: "summary"), + 3: .same(proto: "description"), + 4: .standard(proto: "external_docs"), + 5: .standard(proto: "operation_id"), + 6: .same(proto: "produces"), + 7: .same(proto: "consumes"), + 8: .same(proto: "parameters"), + 9: .same(proto: "responses"), + 10: .same(proto: "schemes"), + 11: .same(proto: "deprecated"), + 12: .same(proto: "security"), + 13: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _tags: [String] = [] + var _summary: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _operationID: String = String() + var _produces: [String] = [] + var _consumes: [String] = [] + var _parameters: [Openapi_V2_ParametersItem] = [] + var _responses: Openapi_V2_Responses? 
= nil + var _schemes: [String] = [] + var _deprecated: Bool = false + var _security: [Openapi_V2_SecurityRequirement] = [] + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _tags = source._tags + _summary = source._summary + _description_p = source._description_p + _externalDocs = source._externalDocs + _operationID = source._operationID + _produces = source._produces + _consumes = source._consumes + _parameters = source._parameters + _responses = source._responses + _schemes = source._schemes + _deprecated = source._deprecated + _security = source._security + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Operation) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._tags != other_storage._tags {return false} + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._operationID != other_storage._operationID {return false} + if _storage._produces != other_storage._produces {return false} + if _storage._consumes != other_storage._consumes {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._schemes != other_storage._schemes {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._security != other_storage._security {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "body_parameter"), + 2: .standard(proto: "non_body_parameter"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_Parameter.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Parameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ParameterDefinitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ParameterDefinitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ParametersItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "parameter"), + 2: .standard(proto: "json_reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_ParametersItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ParametersItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_PathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "get"), + 3: .same(proto: "put"), + 4: .same(proto: "post"), + 5: .same(proto: "delete"), + 6: .same(proto: "options"), + 7: .same(proto: "head"), + 8: .same(proto: "patch"), + 9: .same(proto: "parameters"), + 10: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _ref: String = String() + var _get: Openapi_V2_Operation? = nil + var _put: Openapi_V2_Operation? = nil + var _post: Openapi_V2_Operation? = nil + var _delete: Openapi_V2_Operation? = nil + var _options: Openapi_V2_Operation? = nil + var _head: Openapi_V2_Operation? = nil + var _patch: Openapi_V2_Operation? 
= nil + var _parameters: [Openapi_V2_ParametersItem] = [] + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _ref = source._ref + _get = source._get + _put = source._put + _post = source._post + _delete = source._delete + _options = source._options + _head = source._head + _patch = source._patch + _parameters = source._parameters + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_PathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._ref != other_storage._ref {return false} + if _storage._get != other_storage._get {return false} + if _storage._put != other_storage._put {return false} + if _storage._post != other_storage._post {return false} + if _storage._delete != other_storage._delete {return false} + if _storage._options != other_storage._options {return false} + if _storage._head != other_storage._head {return false} + if _storage._patch != other_storage._patch {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_PathParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .same(proto: "type"), + 6: .same(proto: "format"), + 7: .same(proto: "items"), + 8: .standard(proto: "collection_format"), + 9: .same(proto: "default"), + 10: .same(proto: "maximum"), + 11: .standard(proto: "exclusive_maximum"), + 12: .same(proto: "minimum"), + 13: .standard(proto: "exclusive_minimum"), + 14: .standard(proto: "max_length"), + 15: .standard(proto: "min_length"), + 16: .same(proto: "pattern"), + 17: .standard(proto: "max_items"), + 18: .standard(proto: "min_items"), + 19: .standard(proto: "unique_items"), + 20: .same(proto: "enum"), + 21: .standard(proto: "multiple_of"), + 22: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_PathParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Paths: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "vendor_extension"), + 2: .same(proto: 
"path"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Paths) -> Bool { + if self.vendorExtension != other.vendorExtension {return false} + if self.path != other.path {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_PrimitivesItems: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "format"), + 3: .same(proto: "items"), + 4: .standard(proto: "collection_format"), + 5: .same(proto: "default"), + 6: .same(proto: "maximum"), + 7: .standard(proto: "exclusive_maximum"), + 8: .same(proto: "minimum"), + 9: .standard(proto: "exclusive_minimum"), + 10: .standard(proto: "max_length"), + 11: .standard(proto: "min_length"), + 12: .same(proto: "pattern"), + 13: .standard(proto: "max_items"), + 14: .standard(proto: "min_items"), + 15: .standard(proto: "unique_items"), + 16: .same(proto: "enum"), + 17: .standard(proto: "multiple_of"), + 18: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? = nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_PrimitivesItems) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != 
other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Properties: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Properties) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_QueryParameterSubSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "name"), + 5: .standard(proto: "allow_empty_value"), + 6: .same(proto: "type"), + 7: .same(proto: "format"), + 8: .same(proto: "items"), + 9: .standard(proto: "collection_format"), + 10: .same(proto: "default"), + 11: .same(proto: "maximum"), + 12: .standard(proto: "exclusive_maximum"), + 13: .same(proto: "minimum"), + 14: .standard(proto: "exclusive_minimum"), + 15: .standard(proto: "max_length"), + 16: .standard(proto: "min_length"), + 17: .same(proto: "pattern"), + 18: .standard(proto: "max_items"), + 19: .standard(proto: "min_items"), + 20: .standard(proto: "unique_items"), + 21: .same(proto: "enum"), + 22: .standard(proto: "multiple_of"), + 23: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _required: Bool = false + var _in: String = String() + var _description_p: String = String() + var _name: String = String() + var _allowEmptyValue: Bool = false + var _type: String = String() + var _format: String = String() + var _items: Openapi_V2_PrimitivesItems? = nil + var _collectionFormat: String = String() + var _default: Openapi_V2_Any? 
= nil + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _enum: [Openapi_V2_Any] = [] + var _multipleOf: Double = 0 + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _required = source._required + _in = source._in + _description_p = source._description_p + _name = source._name + _allowEmptyValue = source._allowEmptyValue + _type = source._type + _format = source._format + _items = source._items + _collectionFormat = source._collectionFormat + _default = source._default + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _enum = source._enum + _multipleOf = source._multipleOf + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_QueryParameterSubSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._required != other_storage._required {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._type != other_storage._type {return false} + if _storage._format != other_storage._format {return false} + if _storage._items != other_storage._items {return false} + if _storage._collectionFormat != other_storage._collectionFormat {return false} + if _storage._default != other_storage._default {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + 
public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "schema"), + 3: .same(proto: "headers"), + 4: .same(proto: "examples"), + 5: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _schema: Openapi_V2_SchemaItem? = nil + var _headers: Openapi_V2_Headers? = nil + var _examples: Openapi_V2_Examples? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _schema = source._schema + _headers = source._headers + _examples = source._examples + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Response) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ResponseDefinitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ResponseDefinitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_ResponseValue: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "response"), + 2: .standard(proto: "json_reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_ResponseValue.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_ResponseValue) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Responses: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "response_code"), + 2: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Responses) -> Bool { + if self.responseCode != other.responseCode {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Schema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "format"), + 3: .same(proto: "title"), + 4: .same(proto: "description"), + 5: .same(proto: "default"), + 6: .standard(proto: "multiple_of"), + 7: .same(proto: "maximum"), + 8: .standard(proto: "exclusive_maximum"), + 9: .same(proto: "minimum"), + 10: .standard(proto: "exclusive_minimum"), + 11: .standard(proto: "max_length"), + 12: .standard(proto: "min_length"), + 13: .same(proto: "pattern"), + 14: .standard(proto: "max_items"), + 15: .standard(proto: "min_items"), + 16: .standard(proto: "unique_items"), + 17: .standard(proto: "max_properties"), + 18: .standard(proto: "min_properties"), + 19: .same(proto: "required"), + 20: .same(proto: "enum"), + 21: .standard(proto: "additional_properties"), + 22: .same(proto: "type"), + 23: .same(proto: "items"), + 24: .standard(proto: "all_of"), + 25: .same(proto: "properties"), + 26: .same(proto: "discriminator"), + 27: .standard(proto: "read_only"), + 28: .same(proto: "xml"), + 29: .standard(proto: "external_docs"), + 30: .same(proto: "example"), + 31: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _ref: String = String() + var _format: String = String() + var _title: String = String() + var _description_p: String = String() + var _default: Openapi_V2_Any? = nil + var _multipleOf: Double = 0 + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _maxProperties: Int64 = 0 + var _minProperties: Int64 = 0 + var _required: [String] = [] + var _enum: [Openapi_V2_Any] = [] + var _additionalProperties: Openapi_V2_AdditionalPropertiesItem? = nil + var _type: Openapi_V2_TypeItem? = nil + var _items: Openapi_V2_ItemsItem? = nil + var _allOf: [Openapi_V2_Schema] = [] + var _properties: Openapi_V2_Properties? 
= nil + var _discriminator: String = String() + var _readOnly: Bool = false + var _xml: Openapi_V2_Xml? = nil + var _externalDocs: Openapi_V2_ExternalDocs? = nil + var _example: Openapi_V2_Any? = nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _ref = source._ref + _format = source._format + _title = source._title + _description_p = source._description_p + _default = source._default + _multipleOf = source._multipleOf + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _maxProperties = source._maxProperties + _minProperties = source._minProperties + _required = source._required + _enum = source._enum + _additionalProperties = source._additionalProperties + _type = source._type + _items = source._items + _allOf = source._allOf + _properties = source._properties + _discriminator = source._discriminator + _readOnly = source._readOnly + _xml = source._xml + _externalDocs = source._externalDocs + _example = source._example + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Schema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._ref != other_storage._ref {return false} + if _storage._format != other_storage._format {return false} + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._maxProperties != other_storage._maxProperties {return false} + if _storage._minProperties != other_storage._minProperties {return false} + if _storage._required != other_storage._required {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._type != other_storage._type {return false} + if _storage._items != other_storage._items {return false} + if _storage._allOf != other_storage._allOf {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._discriminator != 
other_storage._discriminator {return false} + if _storage._readOnly != other_storage._readOnly {return false} + if _storage._xml != other_storage._xml {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._example != other_storage._example {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SchemaItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .standard(proto: "file_schema"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_SchemaItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SchemaItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SecurityDefinitions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SecurityDefinitions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SecurityDefinitionsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "basic_authentication_security"), + 2: .standard(proto: "api_key_security"), + 3: .standard(proto: "oauth2_implicit_security"), + 4: .standard(proto: "oauth2_password_security"), + 5: .standard(proto: "oauth2_application_security"), + 6: .standard(proto: "oauth2_access_code_security"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V2_SecurityDefinitionsItem.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SecurityDefinitionsItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_SecurityRequirement: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_SecurityRequirement) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_StringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_StringArray) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Tag: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "description"), + 3: .standard(proto: "external_docs"), + 4: .standard(proto: "vendor_extension"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V2_ExternalDocs? 
= nil + var _vendorExtension: [Openapi_V2_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _description_p = source._description_p + _externalDocs = source._externalDocs + _vendorExtension = source._vendorExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Tag) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._vendorExtension != other_storage._vendorExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_TypeItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_TypeItem) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_VendorExtension: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_VendorExtension) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V2_Xml: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "namespace"), + 3: .same(proto: "prefix"), + 4: .same(proto: "attribute"), + 5: .same(proto: "wrapped"), + 6: .standard(proto: "vendor_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V2_Xml) -> Bool { + if self.name != other.name {return false} + if self.namespace != other.namespace {return false} + if self.prefix != other.prefix {return false} + if self.attribute != other.attribute {return false} + if self.wrapped != other.wrapped {return false} + if self.vendorExtension != other.vendorExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/OpenAPIv3.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/OpenAPIv3.pb.swift new file mode 100644 index 000000000..0f0f71403 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/OpenAPIv3.pb.swift @@ -0,0 +1,8849 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. 
+// Source: github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.proto +// +// For information on using the generated types, please see the documentation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that you are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public struct Openapi_V3_AdditionalPropertiesItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AdditionalPropertiesItem" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schemaOrReference: Openapi_V3_SchemaOrReference { + get { + if case .schemaOrReference(let v)? = _storage._oneof {return v} + return Openapi_V3_SchemaOrReference() + } + set {_uniqueStorage()._oneof = .schemaOrReference(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = _storage._oneof {return v} + return false + } + set {_uniqueStorage()._oneof = .boolean(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schemaOrReference(Openapi_V3_SchemaOrReference) + case boolean(Bool) + + public static func ==(lhs: Openapi_V3_AdditionalPropertiesItem.OneOf_Oneof, rhs: Openapi_V3_AdditionalPropertiesItem.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schemaOrReference(let l), .schemaOrReference(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_SchemaOrReference?
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schemaOrReference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schemaOrReference(v)} + case 2: + if _storage._oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {_storage._oneof = .boolean(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schemaOrReference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Any: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Any" + + public var value: SwiftProtobuf.Google_Protobuf_Any { + get {return _storage._value ?? SwiftProtobuf.Google_Protobuf_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var yaml: String { + get {return _storage._yaml} + set {_uniqueStorage()._yaml = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._value) + case 2: try decoder.decodeSingularStringField(value: &_storage._yaml) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._yaml.isEmpty { + try visitor.visitSingularStringField(value: _storage._yaml, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_AnyOrExpression: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AnyOrExpression" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var any: Openapi_V3_Any { + get { + if case .any(let v)? = _storage._oneof {return v} + return Openapi_V3_Any() + } + set {_uniqueStorage()._oneof = .any(newValue)} + } + + public var expression: Openapi_V3_Expression { + get { + if case .expression(let v)? = _storage._oneof {return v} + return Openapi_V3_Expression() + } + set {_uniqueStorage()._oneof = .expression(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case any(Openapi_V3_Any) + case expression(Openapi_V3_Expression) + + public static func ==(lhs: Openapi_V3_AnyOrExpression.OneOf_Oneof, rhs: Openapi_V3_AnyOrExpression.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.any(let l), .any(let r)): return l == r + case (.expression(let l), .expression(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Any? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .any(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .any(v)} + case 2: + var v: Openapi_V3_Expression? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .expression(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .expression(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .any(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .expression(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_AnysOrExpressions: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".AnysOrExpressions" + + public var additionalProperties: [Openapi_V3_NamedAnyOrExpression] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation. +public struct Openapi_V3_Callback: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Callback" + + public var path: [Openapi_V3_NamedPathItem] = [] + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.path) + case 2: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.path.isEmpty { + try visitor.visitRepeatedMessageField(value: self.path, fieldNumber: 1) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_CallbackOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".CallbackOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var callback: Openapi_V3_Callback { + get { + if case .callback(let v)? = _storage._oneof {return v} + return Openapi_V3_Callback() + } + set {_uniqueStorage()._oneof = .callback(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case callback(Openapi_V3_Callback) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_CallbackOrReference.OneOf_Oneof, rhs: Openapi_V3_CallbackOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.callback(let l), .callback(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Callback? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .callback(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .callback(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .callback(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_CallbacksOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".CallbacksOrReferences" + + public var additionalProperties: [Openapi_V3_NamedCallbackOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object. +public struct Openapi_V3_Components: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Components" + + public var schemas: Openapi_V3_SchemasOrReferences { + get {return _storage._schemas ?? Openapi_V3_SchemasOrReferences()} + set {_uniqueStorage()._schemas = newValue} + } + /// Returns true if `schemas` has been explicitly set. + public var hasSchemas: Bool {return _storage._schemas != nil} + /// Clears the value of `schemas`. Subsequent reads from it will return its default value. + public mutating func clearSchemas() {_storage._schemas = nil} + + public var responses: Openapi_V3_ResponsesOrReferences { + get {return _storage._responses ?? Openapi_V3_ResponsesOrReferences()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + public var parameters: Openapi_V3_ParametersOrReferences { + get {return _storage._parameters ?? Openapi_V3_ParametersOrReferences()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. 
Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var requestBodies: Openapi_V3_RequestBodiesOrReferences { + get {return _storage._requestBodies ?? Openapi_V3_RequestBodiesOrReferences()} + set {_uniqueStorage()._requestBodies = newValue} + } + /// Returns true if `requestBodies` has been explicitly set. + public var hasRequestBodies: Bool {return _storage._requestBodies != nil} + /// Clears the value of `requestBodies`. Subsequent reads from it will return its default value. + public mutating func clearRequestBodies() {_storage._requestBodies = nil} + + public var headers: Openapi_V3_HeadersOrReferences { + get {return _storage._headers ?? Openapi_V3_HeadersOrReferences()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. + public mutating func clearHeaders() {_storage._headers = nil} + + public var securitySchemes: Openapi_V3_SecuritySchemesOrReferences { + get {return _storage._securitySchemes ?? Openapi_V3_SecuritySchemesOrReferences()} + set {_uniqueStorage()._securitySchemes = newValue} + } + /// Returns true if `securitySchemes` has been explicitly set. + public var hasSecuritySchemes: Bool {return _storage._securitySchemes != nil} + /// Clears the value of `securitySchemes`. Subsequent reads from it will return its default value. + public mutating func clearSecuritySchemes() {_storage._securitySchemes = nil} + + public var links: Openapi_V3_LinksOrReferences { + get {return _storage._links ?? Openapi_V3_LinksOrReferences()} + set {_uniqueStorage()._links = newValue} + } + /// Returns true if `links` has been explicitly set. + public var hasLinks: Bool {return _storage._links != nil} + /// Clears the value of `links`. Subsequent reads from it will return its default value. + public mutating func clearLinks() {_storage._links = nil} + + public var callbacks: Openapi_V3_CallbacksOrReferences { + get {return _storage._callbacks ?? Openapi_V3_CallbacksOrReferences()} + set {_uniqueStorage()._callbacks = newValue} + } + /// Returns true if `callbacks` has been explicitly set. + public var hasCallbacks: Bool {return _storage._callbacks != nil} + /// Clears the value of `callbacks`. Subsequent reads from it will return its default value. + public mutating func clearCallbacks() {_storage._callbacks = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._schemas) + case 2: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 3: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 4: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 5: try decoder.decodeSingularMessageField(value: &_storage._requestBodies) + case 6: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 7: try decoder.decodeSingularMessageField(value: &_storage._securitySchemes) + case 8: try decoder.decodeSingularMessageField(value: &_storage._links) + case 9: try decoder.decodeSingularMessageField(value: &_storage._callbacks) + case 10: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._schemas { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._requestBodies { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._securitySchemes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._links { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._callbacks { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 10) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Contact information for the exposed API. +public struct Openapi_V3_Contact: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Contact" + + public var name: String = String() + + public var url: String = String() + + public var email: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeSingularStringField(value: &self.email) + case 4: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.email.isEmpty { + try visitor.visitSingularStringField(value: self.email, fieldNumber: 3) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_DefaultType: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".DefaultType" + + public var oneof: Openapi_V3_DefaultType.OneOf_Oneof? = nil + + public var number: Double { + get { + if case .number(let v)? = oneof {return v} + return 0 + } + set {oneof = .number(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = oneof {return v} + return false + } + set {oneof = .boolean(newValue)} + } + + public var string: String { + get { + if case .string(let v)? = oneof {return v} + return String() + } + set {oneof = .string(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case number(Double) + case boolean(Bool) + case string(String) + + public static func ==(lhs: Openapi_V3_DefaultType.OneOf_Oneof, rhs: Openapi_V3_DefaultType.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.number(let l), .number(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + case (.string(let l), .string(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Double? + try decoder.decodeSingularDoubleField(value: &v) + if let v = v {self.oneof = .number(v)} + case 2: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {self.oneof = .boolean(v)} + case 3: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: String? 
+ try decoder.decodeSingularStringField(value: &v) + if let v = v {self.oneof = .string(v)} + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + switch self.oneof { + case .number(let v)?: + try visitor.visitSingularDoubleField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case .string(let v)?: + try visitor.visitSingularStringField(value: v, fieldNumber: 3) + case nil: break + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered. +public struct Openapi_V3_Discriminator: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Discriminator" + + public var propertyName: String { + get {return _storage._propertyName} + set {_uniqueStorage()._propertyName = newValue} + } + + public var mapping: Openapi_V3_Strings { + get {return _storage._mapping ?? Openapi_V3_Strings()} + set {_uniqueStorage()._mapping = newValue} + } + /// Returns true if `mapping` has been explicitly set. + public var hasMapping: Bool {return _storage._mapping != nil} + /// Clears the value of `mapping`. Subsequent reads from it will return its default value. + public mutating func clearMapping() {_storage._mapping = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._propertyName) + case 2: try decoder.decodeSingularMessageField(value: &_storage._mapping) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._propertyName.isEmpty { + try visitor.visitSingularStringField(value: _storage._propertyName, fieldNumber: 1) + } + if let v = _storage._mapping { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Document: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Document" + + public var openapi: String { + get {return _storage._openapi} + set {_uniqueStorage()._openapi = newValue} + } + + public var info: Openapi_V3_Info { + get {return _storage._info ?? Openapi_V3_Info()} + set {_uniqueStorage()._info = newValue} + } + /// Returns true if `info` has been explicitly set. + public var hasInfo: Bool {return _storage._info != nil} + /// Clears the value of `info`. Subsequent reads from it will return its default value. + public mutating func clearInfo() {_storage._info = nil} + + public var servers: [Openapi_V3_Server] { + get {return _storage._servers} + set {_uniqueStorage()._servers = newValue} + } + + public var paths: Openapi_V3_Paths { + get {return _storage._paths ?? Openapi_V3_Paths()} + set {_uniqueStorage()._paths = newValue} + } + /// Returns true if `paths` has been explicitly set. + public var hasPaths: Bool {return _storage._paths != nil} + /// Clears the value of `paths`. Subsequent reads from it will return its default value. + public mutating func clearPaths() {_storage._paths = nil} + + public var components: Openapi_V3_Components { + get {return _storage._components ?? Openapi_V3_Components()} + set {_uniqueStorage()._components = newValue} + } + /// Returns true if `components` has been explicitly set. + public var hasComponents: Bool {return _storage._components != nil} + /// Clears the value of `components`. Subsequent reads from it will return its default value. + public mutating func clearComponents() {_storage._components = nil} + + public var security: [Openapi_V3_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var tags: [Openapi_V3_Tag] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
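The `Openapi_V3_Discriminator` message just above corresponds to the OpenAPI discriminator object described in its comment. A minimal usage sketch of the generated API, assuming the rest of this generated file (including `Openapi_V3_Strings`) is available in the same module and that the snippet runs as top-level script code; the "petType" value is purely illustrative:

import Foundation
import SwiftProtobuf

var discriminator = Openapi_V3_Discriminator()
discriminator.propertyName = "petType"          // illustrative property name

// Singular message fields expose has*/clear* accessors instead of Swift Optionals.
assert(!discriminator.hasMapping)
discriminator.mapping = Openapi_V3_Strings()    // `Openapi_V3_Strings` is generated elsewhere in this file
assert(discriminator.hasMapping)

// Round-trip through the binary form provided by SwiftProtobuf.Message.
do {
    let data = try discriminator.serializedData()
    let decoded = try Openapi_V3_Discriminator(serializedData: data)
    print(decoded.propertyName)                 // prints "petType"
} catch {
    print("round-trip failed: \(error)")
}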
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._openapi) + case 2: try decoder.decodeSingularMessageField(value: &_storage._info) + case 3: try decoder.decodeRepeatedMessageField(value: &_storage._servers) + case 4: try decoder.decodeSingularMessageField(value: &_storage._paths) + case 5: try decoder.decodeSingularMessageField(value: &_storage._components) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._tags) + case 8: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 9: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._openapi.isEmpty { + try visitor.visitSingularStringField(value: _storage._openapi, fieldNumber: 1) + } + if let v = _storage._info { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if !_storage._servers.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._servers, fieldNumber: 3) + } + if let v = _storage._paths { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._components { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 6) + } + if !_storage._tags.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._tags, fieldNumber: 7) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 9) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// A single encoding definition applied to a single schema property. +public struct Openapi_V3_Encoding: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Encoding" + + public var contentType: String { + get {return _storage._contentType} + set {_uniqueStorage()._contentType = newValue} + } + + public var headers: Openapi_V3_HeadersOrReferences { + get {return _storage._headers ?? Openapi_V3_HeadersOrReferences()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. 
+ public mutating func clearHeaders() {_storage._headers = nil} + + public var style: String { + get {return _storage._style} + set {_uniqueStorage()._style = newValue} + } + + public var explode: Bool { + get {return _storage._explode} + set {_uniqueStorage()._explode = newValue} + } + + public var allowReserved: Bool { + get {return _storage._allowReserved} + set {_uniqueStorage()._allowReserved = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._contentType) + case 2: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 3: try decoder.decodeSingularStringField(value: &_storage._style) + case 4: try decoder.decodeSingularBoolField(value: &_storage._explode) + case 5: try decoder.decodeSingularBoolField(value: &_storage._allowReserved) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._contentType.isEmpty { + try visitor.visitSingularStringField(value: _storage._contentType, fieldNumber: 1) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if !_storage._style.isEmpty { + try visitor.visitSingularStringField(value: _storage._style, fieldNumber: 3) + } + if _storage._explode != false { + try visitor.visitSingularBoolField(value: _storage._explode, fieldNumber: 4) + } + if _storage._allowReserved != false { + try visitor.visitSingularBoolField(value: _storage._allowReserved, fieldNumber: 5) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 6) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Encodings: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Encodings" + + public var additionalProperties: [Openapi_V3_NamedEncoding] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Example: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Example" + + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var value: Openapi_V3_Any { + get {return _storage._value ?? Openapi_V3_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var externalValue: String { + get {return _storage._externalValue} + set {_uniqueStorage()._externalValue = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._summary) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._value) + case 4: try decoder.decodeSingularStringField(value: &_storage._externalValue) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._externalValue.isEmpty { + try visitor.visitSingularStringField(value: _storage._externalValue, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ExampleOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExampleOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var example: Openapi_V3_Example { + get { + if case .example(let v)? = _storage._oneof {return v} + return Openapi_V3_Example() + } + set {_uniqueStorage()._oneof = .example(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case example(Openapi_V3_Example) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_ExampleOrReference.OneOf_Oneof, rhs: Openapi_V3_ExampleOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.example(let l), .example(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Example? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .example(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .example(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .example(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Examples: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Examples" + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let _ = try decoder.nextFieldNumber() { + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_ExamplesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExamplesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedExampleOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Expression: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Expression" + + public var additionalProperties: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Allows referencing an external resource for extended documentation. +public struct Openapi_V3_ExternalDocs: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ExternalDocs" + + public var description_p: String = String() + + public var url: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.description_p) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`). 
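As the comment above notes, a header object carries only parameter-style traits; its name is the key in the enclosing headers map. A rough sketch of populating the generated `Openapi_V3_Header` struct defined just below, assuming the full generated module (including `Openapi_V3_SchemaOrReference`) compiles together; the field values are illustrative:

import Foundation
import SwiftProtobuf

// No `name` or `in` field: the header's name is the key of the surrounding map.
var rateLimit = Openapi_V3_Header()
rateLimit.description_p = "Requests remaining in the current window"
rateLimit.style = "simple"

// Singular message fields report explicit presence via has*/clear*.
assert(!rateLimit.hasSchema)
rateLimit.schema = Openapi_V3_SchemaOrReference()   // generated elsewhere in this file
assert(rateLimit.hasSchema)
rateLimit.clearSchema()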
+public struct Openapi_V3_Header: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Header" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var style: String { + get {return _storage._style} + set {_uniqueStorage()._style = newValue} + } + + public var explode: Bool { + get {return _storage._explode} + set {_uniqueStorage()._explode = newValue} + } + + public var allowReserved: Bool { + get {return _storage._allowReserved} + set {_uniqueStorage()._allowReserved = newValue} + } + + public var schema: Openapi_V3_SchemaOrReference { + get {return _storage._schema ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularBoolField(value: &_storage._required) + case 3: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 4: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 5: try decoder.decodeSingularStringField(value: &_storage._style) + case 6: try decoder.decodeSingularBoolField(value: &_storage._explode) + case 7: try decoder.decodeSingularBoolField(value: &_storage._allowReserved) + case 8: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 9: try decoder.decodeSingularMessageField(value: &_storage._example) + case 10: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 11: try decoder.decodeSingularMessageField(value: &_storage._content) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 2) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 3) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 4) + } + if !_storage._style.isEmpty { + try visitor.visitSingularStringField(value: _storage._style, fieldNumber: 5) + } + if _storage._explode != false { + try visitor.visitSingularBoolField(value: _storage._explode, fieldNumber: 6) + } + if _storage._allowReserved != false { + try visitor.visitSingularBoolField(value: _storage._allowReserved, fieldNumber: 7) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 12) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_HeaderOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".HeaderOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var header: Openapi_V3_Header { + get { + if case .header(let v)? 
= _storage._oneof {return v} + return Openapi_V3_Header() + } + set {_uniqueStorage()._oneof = .header(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case header(Openapi_V3_Header) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_HeaderOrReference.OneOf_Oneof, rhs: Openapi_V3_HeaderOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.header(let l), .header(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Header? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .header(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .header(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .header(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_HeadersOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".HeadersOrReferences" + + public var additionalProperties: [Openapi_V3_NamedHeaderOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The object provides metadata about the API. The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience. +public struct Openapi_V3_Info: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Info" + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var termsOfService: String { + get {return _storage._termsOfService} + set {_uniqueStorage()._termsOfService = newValue} + } + + public var contact: Openapi_V3_Contact { + get {return _storage._contact ?? Openapi_V3_Contact()} + set {_uniqueStorage()._contact = newValue} + } + /// Returns true if `contact` has been explicitly set. + public var hasContact: Bool {return _storage._contact != nil} + /// Clears the value of `contact`. Subsequent reads from it will return its default value. + public mutating func clearContact() {_storage._contact = nil} + + public var license: Openapi_V3_License { + get {return _storage._license ?? Openapi_V3_License()} + set {_uniqueStorage()._license = newValue} + } + /// Returns true if `license` has been explicitly set. + public var hasLicense: Bool {return _storage._license != nil} + /// Clears the value of `license`. Subsequent reads from it will return its default value. + public mutating func clearLicense() {_storage._license = nil} + + public var version: String { + get {return _storage._version} + set {_uniqueStorage()._version = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._title) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularStringField(value: &_storage._termsOfService) + case 4: try decoder.decodeSingularMessageField(value: &_storage._contact) + case 5: try decoder.decodeSingularMessageField(value: &_storage._license) + case 6: try decoder.decodeSingularStringField(value: &_storage._version) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if !_storage._termsOfService.isEmpty { + try visitor.visitSingularStringField(value: _storage._termsOfService, fieldNumber: 3) + } + if let v = _storage._contact { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._license { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._version.isEmpty { + try visitor.visitSingularStringField(value: _storage._version, fieldNumber: 6) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ItemsItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ItemsItem" + + public var schemaOrReference: [Openapi_V3_SchemaOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.schemaOrReference) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.schemaOrReference.isEmpty { + try visitor.visitRepeatedMessageField(value: self.schemaOrReference, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// License information for the exposed API. +public struct Openapi_V3_License: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".License" + + public var name: String = String() + + public var url: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.url) + case 3: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.url.isEmpty { + try visitor.visitSingularStringField(value: self.url, fieldNumber: 2) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation. +public struct Openapi_V3_Link: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Link" + + public var operationRef: String { + get {return _storage._operationRef} + set {_uniqueStorage()._operationRef = newValue} + } + + public var operationID: String { + get {return _storage._operationID} + set {_uniqueStorage()._operationID = newValue} + } + + public var parameters: Openapi_V3_AnysOrExpressions { + get {return _storage._parameters ?? Openapi_V3_AnysOrExpressions()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. 
+ public mutating func clearParameters() {_storage._parameters = nil} + + public var requestBody: Openapi_V3_AnyOrExpression { + get {return _storage._requestBody ?? Openapi_V3_AnyOrExpression()} + set {_uniqueStorage()._requestBody = newValue} + } + /// Returns true if `requestBody` has been explicitly set. + public var hasRequestBody: Bool {return _storage._requestBody != nil} + /// Clears the value of `requestBody`. Subsequent reads from it will return its default value. + public mutating func clearRequestBody() {_storage._requestBody = nil} + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var server: Openapi_V3_Server { + get {return _storage._server ?? Openapi_V3_Server()} + set {_uniqueStorage()._server = newValue} + } + /// Returns true if `server` has been explicitly set. + public var hasServer: Bool {return _storage._server != nil} + /// Clears the value of `server`. Subsequent reads from it will return its default value. + public mutating func clearServer() {_storage._server = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._operationRef) + case 2: try decoder.decodeSingularStringField(value: &_storage._operationID) + case 3: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 4: try decoder.decodeSingularMessageField(value: &_storage._requestBody) + case 5: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 6: try decoder.decodeSingularMessageField(value: &_storage._server) + case 7: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._operationRef.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationRef, fieldNumber: 1) + } + if !_storage._operationID.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationID, fieldNumber: 2) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._requestBody { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 5) + } + if let v = _storage._server { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 7) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_LinkOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".LinkOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var link: Openapi_V3_Link { + get { + if case .link(let v)? = _storage._oneof {return v} + return Openapi_V3_Link() + } + set {_uniqueStorage()._oneof = .link(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case link(Openapi_V3_Link) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_LinkOrReference.OneOf_Oneof, rhs: Openapi_V3_LinkOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.link(let l), .link(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Link? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .link(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .link(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .link(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_LinksOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".LinksOrReferences" + + public var additionalProperties: [Openapi_V3_NamedLinkOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Each Media Type Object provides schema and examples for the media type identified by its key. +public struct Openapi_V3_MediaType: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".MediaType" + + public var schema: Openapi_V3_SchemaOrReference { + get {return _storage._schema ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. 
+ public mutating func clearExamples() {_storage._examples = nil} + + public var encoding: Openapi_V3_Encodings { + get {return _storage._encoding ?? Openapi_V3_Encodings()} + set {_uniqueStorage()._encoding = newValue} + } + /// Returns true if `encoding` has been explicitly set. + public var hasEncoding: Bool {return _storage._encoding != nil} + /// Clears the value of `encoding`. Subsequent reads from it will return its default value. + public mutating func clearEncoding() {_storage._encoding = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 2: try decoder.decodeSingularMessageField(value: &_storage._example) + case 3: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 4: try decoder.decodeSingularMessageField(value: &_storage._encoding) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._encoding { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_MediaTypes: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".MediaTypes" + + public var additionalProperties: [Openapi_V3_NamedMediaType] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +public struct Openapi_V3_NamedAny: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedAny" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_Any { + get {return _storage._value ?? Openapi_V3_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of AnyOrExpression as ordered (name,value) pairs. 
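+// A minimal usage sketch (editorial illustration, not generated code): the Named*
+// messages above model ordered maps as repeated (name, value) pairs, so a
+// MediaTypes value is assembled by appending NamedMediaType entries rather than by
+// writing to a Swift dictionary. Assumes Foundation and SwiftProtobuf are imported
+// at the top of this file, as is standard for protoc-gen-swift output; the function
+// name is hypothetical and the round trip uses only the stock SwiftProtobuf
+// `serializedData()` / `init(serializedData:)` helpers.
+func exampleMediaTypesRoundTrip() throws {
+  var entry = Openapi_V3_NamedMediaType()
+  entry.name = "application/json"            // map key
+  entry.value = Openapi_V3_MediaType()       // mapped value (schema/examples left unset)
+
+  var content = Openapi_V3_MediaTypes()
+  content.additionalProperties = [entry]     // ordered (name, value) pairs
+
+  let bytes = try content.serializedData()                       // binary protobuf encoding
+  let decoded = try Openapi_V3_MediaTypes(serializedData: bytes) // drives decodeMessage(decoder:)
+  assert(decoded.additionalProperties.first?.name == "application/json")
+}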
+public struct Openapi_V3_NamedAnyOrExpression: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedAnyOrExpression" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_AnyOrExpression { + get {return _storage._value ?? Openapi_V3_AnyOrExpression()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of CallbackOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedCallbackOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedCallbackOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_CallbackOrReference { + get {return _storage._value ?? Openapi_V3_CallbackOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Encoding as ordered (name,value) pairs. +public struct Openapi_V3_NamedEncoding: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedEncoding" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_Encoding { + get {return _storage._value ?? Openapi_V3_Encoding()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ExampleOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedExampleOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedExampleOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ExampleOrReference { + get {return _storage._value ?? Openapi_V3_ExampleOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of HeaderOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedHeaderOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedHeaderOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_HeaderOrReference { + get {return _storage._value ?? Openapi_V3_HeaderOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. 
+ public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of LinkOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedLinkOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedLinkOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_LinkOrReference { + get {return _storage._value ?? Openapi_V3_LinkOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of MediaType as ordered (name,value) pairs. +public struct Openapi_V3_NamedMediaType: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedMediaType" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_MediaType { + get {return _storage._value ?? Openapi_V3_MediaType()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ParameterOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedParameterOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedParameterOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ParameterOrReference { + get {return _storage._value ?? 
Openapi_V3_ParameterOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +public struct Openapi_V3_NamedPathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedPathItem" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_PathItem { + get {return _storage._value ?? Openapi_V3_PathItem()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of RequestBodyOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedRequestBodyOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedRequestBodyOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_RequestBodyOrReference { + get {return _storage._value ?? Openapi_V3_RequestBodyOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ResponseOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedResponseOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResponseOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ResponseOrReference { + get {return _storage._value ?? Openapi_V3_ResponseOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of SchemaOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedSchemaOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSchemaOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_SchemaOrReference { + get {return _storage._value ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. 
+ public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of SecuritySchemeOrReference as ordered (name,value) pairs. +public struct Openapi_V3_NamedSecuritySchemeOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSecuritySchemeOrReference" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_SecuritySchemeOrReference { + get {return _storage._value ?? Openapi_V3_SecuritySchemeOrReference()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of ServerVariable as ordered (name,value) pairs. +public struct Openapi_V3_NamedServerVariable: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedServerVariable" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Openapi_V3_ServerVariable { + get {return _storage._value ?? Openapi_V3_ServerVariable()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +public struct Openapi_V3_NamedString: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedString" + + /// Map key + public var name: String = String() + + /// Mapped value + public var value: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.value.isEmpty { + try visitor.visitSingularStringField(value: self.value, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Configuration details for a supported OAuth Flow +public struct Openapi_V3_OauthFlow: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".OauthFlow" + + public var authorizationURL: String { + get {return _storage._authorizationURL} + set {_uniqueStorage()._authorizationURL = newValue} + } + + public var tokenURL: String { + get {return _storage._tokenURL} + set {_uniqueStorage()._tokenURL = newValue} + } + + public var refreshURL: String { + get {return _storage._refreshURL} + set {_uniqueStorage()._refreshURL = newValue} + } + + public var scopes: Openapi_V3_Strings { + get {return _storage._scopes ?? Openapi_V3_Strings()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._authorizationURL) + case 2: try decoder.decodeSingularStringField(value: &_storage._tokenURL) + case 3: try decoder.decodeSingularStringField(value: &_storage._refreshURL) + case 4: try decoder.decodeSingularMessageField(value: &_storage._scopes) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._authorizationURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._authorizationURL, fieldNumber: 1) + } + if !_storage._tokenURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._tokenURL, fieldNumber: 2) + } + if !_storage._refreshURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._refreshURL, fieldNumber: 3) + } + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Allows configuration of the supported OAuth Flows. +public struct Openapi_V3_OauthFlows: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".OauthFlows" + + public var implicit: Openapi_V3_OauthFlow { + get {return _storage._implicit ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._implicit = newValue} + } + /// Returns true if `implicit` has been explicitly set. + public var hasImplicit: Bool {return _storage._implicit != nil} + /// Clears the value of `implicit`. Subsequent reads from it will return its default value. + public mutating func clearImplicit() {_storage._implicit = nil} + + public var password: Openapi_V3_OauthFlow { + get {return _storage._password ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._password = newValue} + } + /// Returns true if `password` has been explicitly set. + public var hasPassword: Bool {return _storage._password != nil} + /// Clears the value of `password`. Subsequent reads from it will return its default value. + public mutating func clearPassword() {_storage._password = nil} + + public var clientCredentials: Openapi_V3_OauthFlow { + get {return _storage._clientCredentials ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._clientCredentials = newValue} + } + /// Returns true if `clientCredentials` has been explicitly set. + public var hasClientCredentials: Bool {return _storage._clientCredentials != nil} + /// Clears the value of `clientCredentials`. Subsequent reads from it will return its default value. + public mutating func clearClientCredentials() {_storage._clientCredentials = nil} + + public var authorizationCode: Openapi_V3_OauthFlow { + get {return _storage._authorizationCode ?? Openapi_V3_OauthFlow()} + set {_uniqueStorage()._authorizationCode = newValue} + } + /// Returns true if `authorizationCode` has been explicitly set. + public var hasAuthorizationCode: Bool {return _storage._authorizationCode != nil} + /// Clears the value of `authorizationCode`. Subsequent reads from it will return its default value. + public mutating func clearAuthorizationCode() {_storage._authorizationCode = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._implicit) + case 2: try decoder.decodeSingularMessageField(value: &_storage._password) + case 3: try decoder.decodeSingularMessageField(value: &_storage._clientCredentials) + case 4: try decoder.decodeSingularMessageField(value: &_storage._authorizationCode) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._implicit { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._password { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._clientCredentials { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._authorizationCode { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_Object: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Object" + + public var additionalProperties: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Describes a single API operation on a path. 
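+// A hedged configuration sketch (illustrative only): populating the OauthFlow and
+// OauthFlows messages defined above. Assigning to `authorizationCode` flips the
+// generated presence accessor `hasAuthorizationCode` to true, matching the
+// explicit-presence pattern used for every singular message field in this file.
+// The URLs are placeholders and the function name is hypothetical.
+func exampleOauthFlows() -> Openapi_V3_OauthFlows {
+  var codeFlow = Openapi_V3_OauthFlow()
+  codeFlow.authorizationURL = "https://auth.example.com/authorize"
+  codeFlow.tokenURL = "https://auth.example.com/token"
+
+  var flows = Openapi_V3_OauthFlows()
+  flows.authorizationCode = codeFlow               // sets _storage._authorizationCode
+  assert(flows.hasAuthorizationCode && !flows.hasImplicit)
+  return flows
+}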
+public struct Openapi_V3_Operation: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Operation" + + public var tags: [String] { + get {return _storage._tags} + set {_uniqueStorage()._tags = newValue} + } + + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var operationID: String { + get {return _storage._operationID} + set {_uniqueStorage()._operationID = newValue} + } + + public var parameters: [Openapi_V3_ParameterOrReference] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var requestBody: Openapi_V3_RequestBodyOrReference { + get {return _storage._requestBody ?? Openapi_V3_RequestBodyOrReference()} + set {_uniqueStorage()._requestBody = newValue} + } + /// Returns true if `requestBody` has been explicitly set. + public var hasRequestBody: Bool {return _storage._requestBody != nil} + /// Clears the value of `requestBody`. Subsequent reads from it will return its default value. + public mutating func clearRequestBody() {_storage._requestBody = nil} + + public var responses: Openapi_V3_Responses { + get {return _storage._responses ?? Openapi_V3_Responses()} + set {_uniqueStorage()._responses = newValue} + } + /// Returns true if `responses` has been explicitly set. + public var hasResponses: Bool {return _storage._responses != nil} + /// Clears the value of `responses`. Subsequent reads from it will return its default value. + public mutating func clearResponses() {_storage._responses = nil} + + public var callbacks: Openapi_V3_CallbacksOrReferences { + get {return _storage._callbacks ?? Openapi_V3_CallbacksOrReferences()} + set {_uniqueStorage()._callbacks = newValue} + } + /// Returns true if `callbacks` has been explicitly set. + public var hasCallbacks: Bool {return _storage._callbacks != nil} + /// Clears the value of `callbacks`. Subsequent reads from it will return its default value. + public mutating func clearCallbacks() {_storage._callbacks = nil} + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var security: [Openapi_V3_SecurityRequirement] { + get {return _storage._security} + set {_uniqueStorage()._security = newValue} + } + + public var servers: [Openapi_V3_Server] { + get {return _storage._servers} + set {_uniqueStorage()._servers = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &_storage._tags) + case 2: try decoder.decodeSingularStringField(value: &_storage._summary) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 5: try decoder.decodeSingularStringField(value: &_storage._operationID) + case 6: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 7: try decoder.decodeSingularMessageField(value: &_storage._requestBody) + case 8: try decoder.decodeSingularMessageField(value: &_storage._responses) + case 9: try decoder.decodeSingularMessageField(value: &_storage._callbacks) + case 10: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 11: try decoder.decodeRepeatedMessageField(value: &_storage._security) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._servers) + case 13: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._tags.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._tags, fieldNumber: 1) + } + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._operationID.isEmpty { + try visitor.visitSingularStringField(value: _storage._operationID, fieldNumber: 5) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 6) + } + if let v = _storage._requestBody { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._responses { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._callbacks { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 10) + } + if !_storage._security.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._security, fieldNumber: 11) + } + if !_storage._servers.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._servers, fieldNumber: 12) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 13) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = 
_StorageClass.defaultInstance +} + +/// Describes a single operation parameter. A unique parameter is defined by a combination of a name and location. +public struct Openapi_V3_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var allowEmptyValue: Bool { + get {return _storage._allowEmptyValue} + set {_uniqueStorage()._allowEmptyValue = newValue} + } + + public var style: String { + get {return _storage._style} + set {_uniqueStorage()._style = newValue} + } + + public var explode: Bool { + get {return _storage._explode} + set {_uniqueStorage()._explode = newValue} + } + + public var allowReserved: Bool { + get {return _storage._allowReserved} + set {_uniqueStorage()._allowReserved = newValue} + } + + public var schema: Openapi_V3_SchemaOrReference { + get {return _storage._schema ?? Openapi_V3_SchemaOrReference()} + set {_uniqueStorage()._schema = newValue} + } + /// Returns true if `schema` has been explicitly set. + public var hasSchema: Bool {return _storage._schema != nil} + /// Clears the value of `schema`. Subsequent reads from it will return its default value. + public mutating func clearSchema() {_storage._schema = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. + public mutating func clearExample() {_storage._example = nil} + + public var examples: Openapi_V3_ExamplesOrReferences { + get {return _storage._examples ?? Openapi_V3_ExamplesOrReferences()} + set {_uniqueStorage()._examples = newValue} + } + /// Returns true if `examples` has been explicitly set. + public var hasExamples: Bool {return _storage._examples != nil} + /// Clears the value of `examples`. Subsequent reads from it will return its default value. + public mutating func clearExamples() {_storage._examples = nil} + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularStringField(value: &_storage._in) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularBoolField(value: &_storage._required) + case 5: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 6: try decoder.decodeSingularBoolField(value: &_storage._allowEmptyValue) + case 7: try decoder.decodeSingularStringField(value: &_storage._style) + case 8: try decoder.decodeSingularBoolField(value: &_storage._explode) + case 9: try decoder.decodeSingularBoolField(value: &_storage._allowReserved) + case 10: try decoder.decodeSingularMessageField(value: &_storage._schema) + case 11: try decoder.decodeSingularMessageField(value: &_storage._example) + case 12: try decoder.decodeSingularMessageField(value: &_storage._examples) + case 13: try decoder.decodeSingularMessageField(value: &_storage._content) + case 14: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 4) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 5) + } + if _storage._allowEmptyValue != false { + try visitor.visitSingularBoolField(value: _storage._allowEmptyValue, fieldNumber: 6) + } + if !_storage._style.isEmpty { + try visitor.visitSingularStringField(value: _storage._style, fieldNumber: 7) + } + if _storage._explode != false { + try visitor.visitSingularBoolField(value: _storage._explode, fieldNumber: 8) + } + if _storage._allowReserved != false { + try visitor.visitSingularBoolField(value: _storage._allowReserved, fieldNumber: 9) + } + if let v = _storage._schema { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if let v = _storage._examples { + try visitor.visitSingularMessageField(value: v, fieldNumber: 12) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 13) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 14) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ParameterOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParameterOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var parameter: Openapi_V3_Parameter { + get { + if case .parameter(let v)? = _storage._oneof {return v} + return Openapi_V3_Parameter() + } + set {_uniqueStorage()._oneof = .parameter(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case parameter(Openapi_V3_Parameter) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_ParameterOrReference.OneOf_Oneof, rhs: Openapi_V3_ParameterOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.parameter(let l), .parameter(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Parameter? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .parameter(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .parameter(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .parameter(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ParametersOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ParametersOrReferences" + + public var additionalProperties: [Openapi_V3_NamedParameterOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available. 
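// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file or this patch): populating
// the Openapi_V3_Operation and Openapi_V3_Parameter messages defined above and
// selecting the .parameter case of the Openapi_V3_ParameterOrReference oneof.
// Only accessors visible in this hunk are used; the concrete values are invented.
func makeListPetsOperation() -> Openapi_V3_Operation {
  var limit = Openapi_V3_Parameter()
  limit.name = "limit"
  limit.`in` = "query"
  limit.description_p = "Maximum number of items to return."
  limit.required = false                      // proto3 default; omitted on the wire

  var limitOrRef = Openapi_V3_ParameterOrReference()
  limitOrRef.parameter = limit                // sets the oneof to .parameter(limit)

  var op = Openapi_V3_Operation()
  op.operationID = "listPets"
  op.summary = "List all pets"
  op.tags = ["pets"]
  op.parameters = [limitOrRef]
  return op
}
// (The Openapi_V3_PathItem message documented just above is defined below.)
// ---------------------------------------------------------------------------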
+public struct Openapi_V3_PathItem: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".PathItem" + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var summary: String { + get {return _storage._summary} + set {_uniqueStorage()._summary = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var get: Openapi_V3_Operation { + get {return _storage._get ?? Openapi_V3_Operation()} + set {_uniqueStorage()._get = newValue} + } + /// Returns true if `get` has been explicitly set. + public var hasGet: Bool {return _storage._get != nil} + /// Clears the value of `get`. Subsequent reads from it will return its default value. + public mutating func clearGet() {_storage._get = nil} + + public var put: Openapi_V3_Operation { + get {return _storage._put ?? Openapi_V3_Operation()} + set {_uniqueStorage()._put = newValue} + } + /// Returns true if `put` has been explicitly set. + public var hasPut: Bool {return _storage._put != nil} + /// Clears the value of `put`. Subsequent reads from it will return its default value. + public mutating func clearPut() {_storage._put = nil} + + public var post: Openapi_V3_Operation { + get {return _storage._post ?? Openapi_V3_Operation()} + set {_uniqueStorage()._post = newValue} + } + /// Returns true if `post` has been explicitly set. + public var hasPost: Bool {return _storage._post != nil} + /// Clears the value of `post`. Subsequent reads from it will return its default value. + public mutating func clearPost() {_storage._post = nil} + + public var delete: Openapi_V3_Operation { + get {return _storage._delete ?? Openapi_V3_Operation()} + set {_uniqueStorage()._delete = newValue} + } + /// Returns true if `delete` has been explicitly set. + public var hasDelete: Bool {return _storage._delete != nil} + /// Clears the value of `delete`. Subsequent reads from it will return its default value. + public mutating func clearDelete() {_storage._delete = nil} + + public var options: Openapi_V3_Operation { + get {return _storage._options ?? Openapi_V3_Operation()} + set {_uniqueStorage()._options = newValue} + } + /// Returns true if `options` has been explicitly set. + public var hasOptions: Bool {return _storage._options != nil} + /// Clears the value of `options`. Subsequent reads from it will return its default value. + public mutating func clearOptions() {_storage._options = nil} + + public var head: Openapi_V3_Operation { + get {return _storage._head ?? Openapi_V3_Operation()} + set {_uniqueStorage()._head = newValue} + } + /// Returns true if `head` has been explicitly set. + public var hasHead: Bool {return _storage._head != nil} + /// Clears the value of `head`. Subsequent reads from it will return its default value. + public mutating func clearHead() {_storage._head = nil} + + public var patch: Openapi_V3_Operation { + get {return _storage._patch ?? Openapi_V3_Operation()} + set {_uniqueStorage()._patch = newValue} + } + /// Returns true if `patch` has been explicitly set. + public var hasPatch: Bool {return _storage._patch != nil} + /// Clears the value of `patch`. Subsequent reads from it will return its default value. + public mutating func clearPatch() {_storage._patch = nil} + + public var trace: Openapi_V3_Operation { + get {return _storage._trace ?? 
Openapi_V3_Operation()} + set {_uniqueStorage()._trace = newValue} + } + /// Returns true if `trace` has been explicitly set. + public var hasTrace: Bool {return _storage._trace != nil} + /// Clears the value of `trace`. Subsequent reads from it will return its default value. + public mutating func clearTrace() {_storage._trace = nil} + + public var servers: [Openapi_V3_Server] { + get {return _storage._servers} + set {_uniqueStorage()._servers = newValue} + } + + public var parameters: [Openapi_V3_ParameterOrReference] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._ref) + case 2: try decoder.decodeSingularStringField(value: &_storage._summary) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularMessageField(value: &_storage._get) + case 5: try decoder.decodeSingularMessageField(value: &_storage._put) + case 6: try decoder.decodeSingularMessageField(value: &_storage._post) + case 7: try decoder.decodeSingularMessageField(value: &_storage._delete) + case 8: try decoder.decodeSingularMessageField(value: &_storage._options) + case 9: try decoder.decodeSingularMessageField(value: &_storage._head) + case 10: try decoder.decodeSingularMessageField(value: &_storage._patch) + case 11: try decoder.decodeSingularMessageField(value: &_storage._trace) + case 12: try decoder.decodeRepeatedMessageField(value: &_storage._servers) + case 13: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 14: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 1) + } + if !_storage._summary.isEmpty { + try visitor.visitSingularStringField(value: _storage._summary, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if let v = _storage._get { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._put { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._post { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._delete { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._options { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if let v = _storage._head { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if let v = _storage._patch { + try visitor.visitSingularMessageField(value: v, fieldNumber: 10) + } + if let v = _storage._trace { + try visitor.visitSingularMessageField(value: v, fieldNumber: 11) + } + if !_storage._servers.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._servers, fieldNumber: 12) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 13) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 14) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Holds the relative paths to the individual endpoints and their operations. The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints. +public struct Openapi_V3_Paths: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Paths" + + public var path: [Openapi_V3_NamedPathItem] = [] + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.path) + case 2: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
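// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file or this patch): the
// optional-backed accessors on Openapi_V3_PathItem shown above, plus the
// Openapi_V3_Paths container. Openapi_V3_NamedPathItem is assumed to expose a
// name/value pair like the other Named* wrappers in this file; its fields are
// not visible in this hunk.
var listPets = Openapi_V3_Operation()
listPets.operationID = "listPets"

var item = Openapi_V3_PathItem()
item.summary = "Operations on the pet collection"
item.get = listPets                           // hasGet is now true
assert(item.hasGet && !item.hasPost)

var namedItem = Openapi_V3_NamedPathItem()    // assumed wrapper: name + value
namedItem.name = "/pets"
namedItem.value = item

var paths = Openapi_V3_Paths()
paths.path = [namedItem]
// ---------------------------------------------------------------------------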
+ public func traverse(visitor: inout V) throws { + if !self.path.isEmpty { + try visitor.visitRepeatedMessageField(value: self.path, fieldNumber: 1) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Properties: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Properties" + + public var additionalProperties: [Openapi_V3_NamedSchemaOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification. +public struct Openapi_V3_Reference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Reference" + + public var ref: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_RequestBodiesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".RequestBodiesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedRequestBodyOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Describes a single request body. +public struct Openapi_V3_RequestBody: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".RequestBody" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
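// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file or this patch): the
// Openapi_V3_Reference message above round-tripped through the SwiftProtobuf
// binary serializers that the generated doc comments refer to
// (`serializedData()` / `init(serializedData:)`). Assumes the remaining
// SwiftProtobuf.Message requirements are satisfied elsewhere in this file.
import Foundation
import SwiftProtobuf

func roundTripReference() throws {
  var ref = Openapi_V3_Reference()
  ref.ref = "#/components/schemas/Pet"

  let wire: Data = try ref.serializedData()                   // field 1, length-delimited
  let decoded = try Openapi_V3_Reference(serializedData: wire)
  assert(decoded.ref == ref.ref)
}
// ---------------------------------------------------------------------------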
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularMessageField(value: &_storage._content) + case 3: try decoder.decodeSingularBoolField(value: &_storage._required) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 3) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_RequestBodyOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".RequestBodyOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var requestBody: Openapi_V3_RequestBody { + get { + if case .requestBody(let v)? = _storage._oneof {return v} + return Openapi_V3_RequestBody() + } + set {_uniqueStorage()._oneof = .requestBody(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case requestBody(Openapi_V3_RequestBody) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_RequestBodyOrReference.OneOf_Oneof, rhs: Openapi_V3_RequestBodyOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.requestBody(let l), .requestBody(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_RequestBody? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .requestBody(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .requestBody(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .requestBody(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Describes a single response from an API Operation, including design-time, static `links` to operations based on the response. +public struct Openapi_V3_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var headers: Openapi_V3_HeadersOrReferences { + get {return _storage._headers ?? Openapi_V3_HeadersOrReferences()} + set {_uniqueStorage()._headers = newValue} + } + /// Returns true if `headers` has been explicitly set. + public var hasHeaders: Bool {return _storage._headers != nil} + /// Clears the value of `headers`. Subsequent reads from it will return its default value. + public mutating func clearHeaders() {_storage._headers = nil} + + public var content: Openapi_V3_MediaTypes { + get {return _storage._content ?? Openapi_V3_MediaTypes()} + set {_uniqueStorage()._content = newValue} + } + /// Returns true if `content` has been explicitly set. + public var hasContent: Bool {return _storage._content != nil} + /// Clears the value of `content`. Subsequent reads from it will return its default value. + public mutating func clearContent() {_storage._content = nil} + + public var links: Openapi_V3_LinksOrReferences { + get {return _storage._links ?? Openapi_V3_LinksOrReferences()} + set {_uniqueStorage()._links = newValue} + } + /// Returns true if `links` has been explicitly set. + public var hasLinks: Bool {return _storage._links != nil} + /// Clears the value of `links`. Subsequent reads from it will return its default value. + public mutating func clearLinks() {_storage._links = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. 
See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 2: try decoder.decodeSingularMessageField(value: &_storage._headers) + case 3: try decoder.decodeSingularMessageField(value: &_storage._content) + case 4: try decoder.decodeSingularMessageField(value: &_storage._links) + case 5: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 1) + } + if let v = _storage._headers { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if let v = _storage._content { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if let v = _storage._links { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 5) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ResponseOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponseOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var response: Openapi_V3_Response { + get { + if case .response(let v)? = _storage._oneof {return v} + return Openapi_V3_Response() + } + set {_uniqueStorage()._oneof = .response(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case response(Openapi_V3_Response) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_ResponseOrReference.OneOf_Oneof, rhs: Openapi_V3_ResponseOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.response(let l), .response(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
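// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file or this patch): marking an
// Openapi_V3_RequestBody as required and attaching it to an Operation through
// the Openapi_V3_RequestBodyOrReference oneof defined above. Openapi_V3_MediaTypes
// (the type of `content`) is defined elsewhere in this file and is not populated here.
var body = Openapi_V3_RequestBody()
body.description_p = "Pet to add to the store"
body.required = true                          // emitted on the wire only when true

var bodyOrRef = Openapi_V3_RequestBodyOrReference()
bodyOrRef.requestBody = body                  // sets the oneof to .requestBody(body)

var createPet = Openapi_V3_Operation()
createPet.operationID = "createPet"
createPet.requestBody = bodyOrRef             // hasRequestBody is now true
// ---------------------------------------------------------------------------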
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Response? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .response(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .response(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .response(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call. +public struct Openapi_V3_Responses: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Responses" + + public var `default`: Openapi_V3_ResponseOrReference { + get {return _storage._default ?? Openapi_V3_ResponseOrReference()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var responseOrReference: [Openapi_V3_NamedResponseOrReference] { + get {return _storage._responseOrReference} + set {_uniqueStorage()._responseOrReference = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
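// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file or this patch): building
// the Openapi_V3_Responses container declared above with a `default` entry,
// using the oneof-backed Openapi_V3_ResponseOrReference accessors.
// Openapi_V3_NamedResponseOrReference is assumed to pair a status-code name with
// a value, mirroring the other Named* wrappers; its fields are not in this hunk.
var ok = Openapi_V3_Response()
ok.description_p = "A paged array of pets"

var okOrRef = Openapi_V3_ResponseOrReference()
okOrRef.response = ok                         // sets the oneof to .response(ok)

var fallback = Openapi_V3_ResponseOrReference()
fallback.reference.ref = "#/components/responses/Error"   // selects the .reference case

var responses = Openapi_V3_Responses()
responses.`default` = fallback                // hasDefault is now true
// responses.responseOrReference would carry the per-status-code entries, e.g. "200" -> okOrRef.
// ---------------------------------------------------------------------------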
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._default) + case 2: try decoder.decodeRepeatedMessageField(value: &_storage._responseOrReference) + case 3: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._responseOrReference.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._responseOrReference, fieldNumber: 2) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 3) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_ResponsesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ResponsesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedResponseOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. Unless stated otherwise, the property definitions follow the JSON Schema. 
+public struct Openapi_V3_Schema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schema" + + public var nullable: Bool { + get {return _storage._nullable} + set {_uniqueStorage()._nullable = newValue} + } + + public var discriminator: Openapi_V3_Discriminator { + get {return _storage._discriminator ?? Openapi_V3_Discriminator()} + set {_uniqueStorage()._discriminator = newValue} + } + /// Returns true if `discriminator` has been explicitly set. + public var hasDiscriminator: Bool {return _storage._discriminator != nil} + /// Clears the value of `discriminator`. Subsequent reads from it will return its default value. + public mutating func clearDiscriminator() {_storage._discriminator = nil} + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var writeOnly: Bool { + get {return _storage._writeOnly} + set {_uniqueStorage()._writeOnly = newValue} + } + + public var xml: Openapi_V3_Xml { + get {return _storage._xml ?? Openapi_V3_Xml()} + set {_uniqueStorage()._xml = newValue} + } + /// Returns true if `xml` has been explicitly set. + public var hasXml: Bool {return _storage._xml != nil} + /// Clears the value of `xml`. Subsequent reads from it will return its default value. + public mutating func clearXml() {_storage._xml = nil} + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var example: Openapi_V3_Any { + get {return _storage._example ?? Openapi_V3_Any()} + set {_uniqueStorage()._example = newValue} + } + /// Returns true if `example` has been explicitly set. + public var hasExample: Bool {return _storage._example != nil} + /// Clears the value of `example`. Subsequent reads from it will return its default value. 
+ public mutating func clearExample() {_storage._example = nil} + + public var deprecated: Bool { + get {return _storage._deprecated} + set {_uniqueStorage()._deprecated = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var multipleOf: Double { + get {return _storage._multipleOf} + set {_uniqueStorage()._multipleOf = newValue} + } + + public var maximum: Double { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var exclusiveMaximum: Bool { + get {return _storage._exclusiveMaximum} + set {_uniqueStorage()._exclusiveMaximum = newValue} + } + + public var minimum: Double { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var exclusiveMinimum: Bool { + get {return _storage._exclusiveMinimum} + set {_uniqueStorage()._exclusiveMinimum = newValue} + } + + public var maxLength: Int64 { + get {return _storage._maxLength} + set {_uniqueStorage()._maxLength = newValue} + } + + public var minLength: Int64 { + get {return _storage._minLength} + set {_uniqueStorage()._minLength = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var maxItems: Int64 { + get {return _storage._maxItems} + set {_uniqueStorage()._maxItems = newValue} + } + + public var minItems: Int64 { + get {return _storage._minItems} + set {_uniqueStorage()._minItems = newValue} + } + + public var uniqueItems: Bool { + get {return _storage._uniqueItems} + set {_uniqueStorage()._uniqueItems = newValue} + } + + public var maxProperties: Int64 { + get {return _storage._maxProperties} + set {_uniqueStorage()._maxProperties = newValue} + } + + public var minProperties: Int64 { + get {return _storage._minProperties} + set {_uniqueStorage()._minProperties = newValue} + } + + public var required: [String] { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var `enum`: [Openapi_V3_Any] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var allOf: [Openapi_V3_SchemaOrReference] { + get {return _storage._allOf} + set {_uniqueStorage()._allOf = newValue} + } + + public var oneOf: [Openapi_V3_SchemaOrReference] { + get {return _storage._oneOf} + set {_uniqueStorage()._oneOf = newValue} + } + + public var anyOf: [Openapi_V3_SchemaOrReference] { + get {return _storage._anyOf} + set {_uniqueStorage()._anyOf = newValue} + } + + public var not: Openapi_V3_Schema { + get {return _storage._not ?? Openapi_V3_Schema()} + set {_uniqueStorage()._not = newValue} + } + /// Returns true if `not` has been explicitly set. + public var hasNot: Bool {return _storage._not != nil} + /// Clears the value of `not`. Subsequent reads from it will return its default value. + public mutating func clearNot() {_storage._not = nil} + + public var items: Openapi_V3_ItemsItem { + get {return _storage._items ?? Openapi_V3_ItemsItem()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var properties: Openapi_V3_Properties { + get {return _storage._properties ?? 
Openapi_V3_Properties()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var additionalProperties: Openapi_V3_AdditionalPropertiesItem { + get {return _storage._additionalProperties ?? Openapi_V3_AdditionalPropertiesItem()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. + public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var `default`: Openapi_V3_DefaultType { + get {return _storage._default ?? Openapi_V3_DefaultType()} + set {_uniqueStorage()._default = newValue} + } + /// Returns true if ``default`` has been explicitly set. + public var hasDefault: Bool {return _storage._default != nil} + /// Clears the value of ``default``. Subsequent reads from it will return its default value. + public mutating func clearDefault() {_storage._default = nil} + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &_storage._nullable) + case 2: try decoder.decodeSingularMessageField(value: &_storage._discriminator) + case 3: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + case 4: try decoder.decodeSingularBoolField(value: &_storage._writeOnly) + case 5: try decoder.decodeSingularMessageField(value: &_storage._xml) + case 6: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 7: try decoder.decodeSingularMessageField(value: &_storage._example) + case 8: try decoder.decodeSingularBoolField(value: &_storage._deprecated) + case 9: try decoder.decodeSingularStringField(value: &_storage._title) + case 10: try decoder.decodeSingularDoubleField(value: &_storage._multipleOf) + case 11: try decoder.decodeSingularDoubleField(value: &_storage._maximum) + case 12: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMaximum) + case 13: try decoder.decodeSingularDoubleField(value: &_storage._minimum) + case 14: try decoder.decodeSingularBoolField(value: &_storage._exclusiveMinimum) + case 15: try decoder.decodeSingularInt64Field(value: &_storage._maxLength) + case 16: try decoder.decodeSingularInt64Field(value: &_storage._minLength) + case 17: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 18: try decoder.decodeSingularInt64Field(value: &_storage._maxItems) + case 19: try decoder.decodeSingularInt64Field(value: &_storage._minItems) + case 20: try decoder.decodeSingularBoolField(value: &_storage._uniqueItems) + case 21: try decoder.decodeSingularInt64Field(value: &_storage._maxProperties) + case 22: try decoder.decodeSingularInt64Field(value: &_storage._minProperties) + case 23: try decoder.decodeRepeatedStringField(value: &_storage._required) + case 24: try decoder.decodeRepeatedMessageField(value: &_storage._enum) + case 25: try decoder.decodeSingularStringField(value: &_storage._type) + case 26: try decoder.decodeRepeatedMessageField(value: &_storage._allOf) + case 27: try decoder.decodeRepeatedMessageField(value: &_storage._oneOf) + case 28: try decoder.decodeRepeatedMessageField(value: &_storage._anyOf) + case 29: try decoder.decodeSingularMessageField(value: &_storage._not) + case 30: try decoder.decodeSingularMessageField(value: &_storage._items) + case 31: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 32: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 33: try decoder.decodeSingularMessageField(value: &_storage._default) + case 34: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 35: try decoder.decodeSingularStringField(value: &_storage._format) + case 36: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if _storage._nullable != false { + try visitor.visitSingularBoolField(value: _storage._nullable, fieldNumber: 1) + } + if let v = _storage._discriminator { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 3) + } + if _storage._writeOnly != false { + try visitor.visitSingularBoolField(value: _storage._writeOnly, fieldNumber: 4) + } + if let v = _storage._xml { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._example { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if _storage._deprecated != false { + try visitor.visitSingularBoolField(value: _storage._deprecated, fieldNumber: 8) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 9) + } + if _storage._multipleOf != 0 { + try visitor.visitSingularDoubleField(value: _storage._multipleOf, fieldNumber: 10) + } + if _storage._maximum != 0 { + try visitor.visitSingularDoubleField(value: _storage._maximum, fieldNumber: 11) + } + if _storage._exclusiveMaximum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMaximum, fieldNumber: 12) + } + if _storage._minimum != 0 { + try visitor.visitSingularDoubleField(value: _storage._minimum, fieldNumber: 13) + } + if _storage._exclusiveMinimum != false { + try visitor.visitSingularBoolField(value: _storage._exclusiveMinimum, fieldNumber: 14) + } + if _storage._maxLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxLength, fieldNumber: 15) + } + if _storage._minLength != 0 { + try visitor.visitSingularInt64Field(value: _storage._minLength, fieldNumber: 16) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 17) + } + if _storage._maxItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxItems, fieldNumber: 18) + } + if _storage._minItems != 0 { + try visitor.visitSingularInt64Field(value: _storage._minItems, fieldNumber: 19) + } + if _storage._uniqueItems != false { + try visitor.visitSingularBoolField(value: _storage._uniqueItems, fieldNumber: 20) + } + if _storage._maxProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._maxProperties, fieldNumber: 21) + } + if _storage._minProperties != 0 { + try visitor.visitSingularInt64Field(value: _storage._minProperties, fieldNumber: 22) + } + if !_storage._required.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._required, fieldNumber: 23) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._enum, fieldNumber: 24) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 25) + } + if !_storage._allOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._allOf, fieldNumber: 26) + } + if !_storage._oneOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._oneOf, fieldNumber: 27) + } + if !_storage._anyOf.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._anyOf, fieldNumber: 28) + } + if let v = _storage._not { + try visitor.visitSingularMessageField(value: v, fieldNumber: 29) + } + if let v = 
_storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 30) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 31) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 32) + } + if let v = _storage._default { + try visitor.visitSingularMessageField(value: v, fieldNumber: 33) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 34) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 35) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 36) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SchemaOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SchemaOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var schema: Openapi_V3_Schema { + get { + if case .schema(let v)? = _storage._oneof {return v} + return Openapi_V3_Schema() + } + set {_uniqueStorage()._oneof = .schema(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case schema(Openapi_V3_Schema) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_SchemaOrReference.OneOf_Oneof, rhs: Openapi_V3_SchemaOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.schema(let l), .schema(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_Schema? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .schema(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .schema(v)} + case 2: + var v: Openapi_V3_Reference? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
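  // Usage sketch (not part of the generated file): assigning either typed accessor of
  // Openapi_V3_SchemaOrReference rewrites the backing `oneof`, so an instance holds at
  // most one of the two cases at a time:
  //
  //     var item = Openapi_V3_SchemaOrReference()
  //     item.schema = Openapi_V3_Schema()        // oneof is now .schema(...)
  //     item.reference = Openapi_V3_Reference()  // replaced by .reference(...)
  //     if case .reference(let r)? = item.oneof { print(r) }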
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .schema(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SchemasOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SchemasOrReferences" + + public var additionalProperties: [Openapi_V3_NamedSchemaOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. When a list of Security Requirement Objects is defined on the Open API object or Operation Object, only one of Security Requirement Objects in the list needs to be satisfied to authorize the request. +public struct Openapi_V3_SecurityRequirement: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityRequirement" + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let _ = try decoder.nextFieldNumber() { + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect Discovery. +public struct Openapi_V3_SecurityScheme: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecurityScheme" + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var `in`: String { + get {return _storage._in} + set {_uniqueStorage()._in = newValue} + } + + public var scheme: String { + get {return _storage._scheme} + set {_uniqueStorage()._scheme = newValue} + } + + public var bearerFormat: String { + get {return _storage._bearerFormat} + set {_uniqueStorage()._bearerFormat = newValue} + } + + public var flows: Openapi_V3_OauthFlows { + get {return _storage._flows ?? Openapi_V3_OauthFlows()} + set {_uniqueStorage()._flows = newValue} + } + /// Returns true if `flows` has been explicitly set. + public var hasFlows: Bool {return _storage._flows != nil} + /// Clears the value of `flows`. Subsequent reads from it will return its default value. + public mutating func clearFlows() {_storage._flows = nil} + + public var openIDConnectURL: String { + get {return _storage._openIDConnectURL} + set {_uniqueStorage()._openIDConnectURL = newValue} + } + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._type) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularStringField(value: &_storage._name) + case 4: try decoder.decodeSingularStringField(value: &_storage._in) + case 5: try decoder.decodeSingularStringField(value: &_storage._scheme) + case 6: try decoder.decodeSingularStringField(value: &_storage._bearerFormat) + case 7: try decoder.decodeSingularMessageField(value: &_storage._flows) + case 8: try decoder.decodeSingularStringField(value: &_storage._openIDConnectURL) + case 9: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. 
See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 3) + } + if !_storage._in.isEmpty { + try visitor.visitSingularStringField(value: _storage._in, fieldNumber: 4) + } + if !_storage._scheme.isEmpty { + try visitor.visitSingularStringField(value: _storage._scheme, fieldNumber: 5) + } + if !_storage._bearerFormat.isEmpty { + try visitor.visitSingularStringField(value: _storage._bearerFormat, fieldNumber: 6) + } + if let v = _storage._flows { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if !_storage._openIDConnectURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._openIDConnectURL, fieldNumber: 8) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 9) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SecuritySchemeOrReference: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecuritySchemeOrReference" + + public var oneof: OneOf_Oneof? { + get {return _storage._oneof} + set {_uniqueStorage()._oneof = newValue} + } + + public var securityScheme: Openapi_V3_SecurityScheme { + get { + if case .securityScheme(let v)? = _storage._oneof {return v} + return Openapi_V3_SecurityScheme() + } + set {_uniqueStorage()._oneof = .securityScheme(newValue)} + } + + public var reference: Openapi_V3_Reference { + get { + if case .reference(let v)? = _storage._oneof {return v} + return Openapi_V3_Reference() + } + set {_uniqueStorage()._oneof = .reference(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case securityScheme(Openapi_V3_SecurityScheme) + case reference(Openapi_V3_Reference) + + public static func ==(lhs: Openapi_V3_SecuritySchemeOrReference.OneOf_Oneof, rhs: Openapi_V3_SecuritySchemeOrReference.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.securityScheme(let l), .securityScheme(let r)): return l == r + case (.reference(let l), .reference(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + var v: Openapi_V3_SecurityScheme? + if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .securityScheme(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .securityScheme(v)} + case 2: + var v: Openapi_V3_Reference? 
+ if let current = _storage._oneof { + try decoder.handleConflictingOneOf() + if case .reference(let m) = current {v = m} + } + try decoder.decodeSingularMessageField(value: &v) + if let v = v {_storage._oneof = .reference(v)} + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + switch _storage._oneof { + case .securityScheme(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + case .reference(let v)?: + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + case nil: break + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Openapi_V3_SecuritySchemesOrReferences: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SecuritySchemesOrReferences" + + public var additionalProperties: [Openapi_V3_NamedSecuritySchemeOrReference] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// An object representing a Server. +public struct Openapi_V3_Server: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Server" + + public var url: String { + get {return _storage._url} + set {_uniqueStorage()._url = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var variables: Openapi_V3_ServerVariables { + get {return _storage._variables ?? Openapi_V3_ServerVariables()} + set {_uniqueStorage()._variables = newValue} + } + /// Returns true if `variables` has been explicitly set. + public var hasVariables: Bool {return _storage._variables != nil} + /// Clears the value of `variables`. Subsequent reads from it will return its default value. 
+ public mutating func clearVariables() {_storage._variables = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._url) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._variables) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._url.isEmpty { + try visitor.visitSingularStringField(value: _storage._url, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._variables { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// An object representing a Server Variable for server URL template substitution. +public struct Openapi_V3_ServerVariable: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ServerVariable" + + public var `enum`: [String] = [] + + public var `default`: String = String() + + public var description_p: String = String() + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
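  // Usage sketch (not part of the generated file): `enum` and `default` are Swift
  // keywords, so the generated ServerVariable properties are escaped with backticks,
  // and the proto field `description` is renamed `description_p` to avoid clashing
  // with CustomStringConvertible:
  //
  //     var variable = Openapi_V3_ServerVariable()
  //     variable.`enum` = ["8443", "443"]
  //     variable.`default` = "8443"
  //     variable.description_p = "port to connect to"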
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.`enum`) + case 2: try decoder.decodeSingularStringField(value: &self.`default`) + case 3: try decoder.decodeSingularStringField(value: &self.description_p) + case 4: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.`enum`.isEmpty { + try visitor.visitRepeatedStringField(value: self.`enum`, fieldNumber: 1) + } + if !self.`default`.isEmpty { + try visitor.visitSingularStringField(value: self.`default`, fieldNumber: 2) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 3) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_ServerVariables: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".ServerVariables" + + public var additionalProperties: [Openapi_V3_NamedServerVariable] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Any property starting with x- is valid. +public struct Openapi_V3_SpecificationExtension: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".SpecificationExtension" + + public var oneof: Openapi_V3_SpecificationExtension.OneOf_Oneof? = nil + + public var number: Double { + get { + if case .number(let v)? = oneof {return v} + return 0 + } + set {oneof = .number(newValue)} + } + + public var boolean: Bool { + get { + if case .boolean(let v)? = oneof {return v} + return false + } + set {oneof = .boolean(newValue)} + } + + public var string: String { + get { + if case .string(let v)? 
= oneof {return v} + return String() + } + set {oneof = .string(newValue)} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public enum OneOf_Oneof: Equatable { + case number(Double) + case boolean(Bool) + case string(String) + + public static func ==(lhs: Openapi_V3_SpecificationExtension.OneOf_Oneof, rhs: Openapi_V3_SpecificationExtension.OneOf_Oneof) -> Bool { + switch (lhs, rhs) { + case (.number(let l), .number(let r)): return l == r + case (.boolean(let l), .boolean(let r)): return l == r + case (.string(let l), .string(let r)): return l == r + default: return false + } + } + } + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Double? + try decoder.decodeSingularDoubleField(value: &v) + if let v = v {self.oneof = .number(v)} + case 2: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: Bool? + try decoder.decodeSingularBoolField(value: &v) + if let v = v {self.oneof = .boolean(v)} + case 3: + if self.oneof != nil {try decoder.handleConflictingOneOf()} + var v: String? + try decoder.decodeSingularStringField(value: &v) + if let v = v {self.oneof = .string(v)} + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + switch self.oneof { + case .number(let v)?: + try visitor.visitSingularDoubleField(value: v, fieldNumber: 1) + case .boolean(let v)?: + try visitor.visitSingularBoolField(value: v, fieldNumber: 2) + case .string(let v)?: + try visitor.visitSingularStringField(value: v, fieldNumber: 3) + case nil: break + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_StringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".StringArray" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
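  // Usage sketch (not part of the generated file): the Openapi_V3_SpecificationExtension
  // message defined just above models an `x-*` property value as a number/boolean/string
  // oneof; setting any typed accessor replaces the previously held case:
  //
  //     var ext = Openapi_V3_SpecificationExtension()
  //     ext.string = "internal"   // oneof becomes .string("internal")
  //     ext.number = 2.0          // replaced by .number(2.0)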
+ public func traverse(visitor: inout V) throws { + if !self.value.isEmpty { + try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Openapi_V3_Strings: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Strings" + + public var additionalProperties: [Openapi_V3_NamedString] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances. +public struct Openapi_V3_Tag: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Tag" + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var externalDocs: Openapi_V3_ExternalDocs { + get {return _storage._externalDocs ?? Openapi_V3_ExternalDocs()} + set {_uniqueStorage()._externalDocs = newValue} + } + /// Returns true if `externalDocs` has been explicitly set. + public var hasExternalDocs: Bool {return _storage._externalDocs != nil} + /// Clears the value of `externalDocs`. Subsequent reads from it will return its default value. + public mutating func clearExternalDocs() {_storage._externalDocs = nil} + + public var specificationExtension: [Openapi_V3_NamedAny] { + get {return _storage._specificationExtension} + set {_uniqueStorage()._specificationExtension = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
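  // Usage sketch (not part of the generated file): message-typed fields such as
  // `externalDocs` are stored as optionals, so presence is tracked separately from the
  // value via the generated has/clear members:
  //
  //     var tag = Openapi_V3_Tag()
  //     tag.name = "pets"
  //     print(tag.hasExternalDocs)                    // false; `externalDocs` reads as a default value
  //     tag.externalDocs = Openapi_V3_ExternalDocs()  // hasExternalDocs is now true
  //     tag.clearExternalDocs()                       // unset again; traverse skips field 3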
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 3: try decoder.decodeSingularMessageField(value: &_storage._externalDocs) + case 4: try decoder.decodeRepeatedMessageField(value: &_storage._specificationExtension) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 2) + } + if let v = _storage._externalDocs { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if !_storage._specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._specificationExtension, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior. +public struct Openapi_V3_Xml: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Xml" + + public var name: String = String() + + public var namespace: String = String() + + public var prefix: String = String() + + public var attribute: Bool = false + + public var wrapped: Bool = false + + public var specificationExtension: [Openapi_V3_NamedAny] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.namespace) + case 3: try decoder.decodeSingularStringField(value: &self.prefix) + case 4: try decoder.decodeSingularBoolField(value: &self.attribute) + case 5: try decoder.decodeSingularBoolField(value: &self.wrapped) + case 6: try decoder.decodeRepeatedMessageField(value: &self.specificationExtension) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.namespace.isEmpty { + try visitor.visitSingularStringField(value: self.namespace, fieldNumber: 2) + } + if !self.prefix.isEmpty { + try visitor.visitSingularStringField(value: self.prefix, fieldNumber: 3) + } + if self.attribute != false { + try visitor.visitSingularBoolField(value: self.attribute, fieldNumber: 4) + } + if self.wrapped != false { + try visitor.visitSingularBoolField(value: self.wrapped, fieldNumber: 5) + } + if !self.specificationExtension.isEmpty { + try visitor.visitRepeatedMessageField(value: self.specificationExtension, fieldNumber: 6) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. + +fileprivate let _protobuf_package = "openapi.v3" + +extension Openapi_V3_AdditionalPropertiesItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "schema_or_reference"), + 2: .same(proto: "boolean"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_AdditionalPropertiesItem.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_AdditionalPropertiesItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Any: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + 2: .same(proto: "yaml"), + ] + + fileprivate class _StorageClass { + var _value: SwiftProtobuf.Google_Protobuf_Any? 
= nil + var _yaml: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _value = source._value + _yaml = source._yaml + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Any) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._value != other_storage._value {return false} + if _storage._yaml != other_storage._yaml {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_AnyOrExpression: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "any"), + 2: .same(proto: "expression"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_AnyOrExpression.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_AnyOrExpression) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_AnysOrExpressions: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_AnysOrExpressions) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Callback: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "path"), + 2: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Callback) -> Bool { + if self.path != other.path {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_CallbackOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "callback"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_CallbackOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_CallbackOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_CallbacksOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_CallbacksOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Components: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schemas"), + 2: .same(proto: "responses"), + 3: .same(proto: "parameters"), + 4: .same(proto: "examples"), + 5: .standard(proto: "request_bodies"), + 6: .same(proto: "headers"), + 7: .standard(proto: "security_schemes"), + 8: .same(proto: "links"), + 9: .same(proto: "callbacks"), + 10: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _schemas: Openapi_V3_SchemasOrReferences? = nil + var _responses: Openapi_V3_ResponsesOrReferences? = nil + var _parameters: Openapi_V3_ParametersOrReferences? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _requestBodies: Openapi_V3_RequestBodiesOrReferences? = nil + var _headers: Openapi_V3_HeadersOrReferences? = nil + var _securitySchemes: Openapi_V3_SecuritySchemesOrReferences? = nil + var _links: Openapi_V3_LinksOrReferences? = nil + var _callbacks: Openapi_V3_CallbacksOrReferences? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _schemas = source._schemas + _responses = source._responses + _parameters = source._parameters + _examples = source._examples + _requestBodies = source._requestBodies + _headers = source._headers + _securitySchemes = source._securitySchemes + _links = source._links + _callbacks = source._callbacks + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Components) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._schemas != other_storage._schemas {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._requestBodies != other_storage._requestBodies {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._securitySchemes != other_storage._securitySchemes {return false} + if _storage._links != other_storage._links {return false} + if _storage._callbacks != other_storage._callbacks {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Contact: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .same(proto: "email"), + 4: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Contact) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.email != other.email {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_DefaultType: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "number"), + 2: .same(proto: "boolean"), + 3: .same(proto: "string"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_DefaultType) -> Bool { + if self.oneof != other.oneof {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Discriminator: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "property_name"), + 2: .same(proto: "mapping"), + ] + + fileprivate class _StorageClass { + var _propertyName: String = String() + var _mapping: Openapi_V3_Strings? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _propertyName = source._propertyName + _mapping = source._mapping + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Discriminator) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._propertyName != other_storage._propertyName {return false} + if _storage._mapping != other_storage._mapping {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Document: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "openapi"), + 2: .same(proto: "info"), + 3: .same(proto: "servers"), + 4: .same(proto: "paths"), + 5: .same(proto: "components"), + 6: .same(proto: "security"), + 7: .same(proto: "tags"), + 8: .standard(proto: "external_docs"), + 9: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _openapi: String = String() + var _info: Openapi_V3_Info? = nil + var _servers: [Openapi_V3_Server] = [] + var _paths: Openapi_V3_Paths? = nil + var _components: Openapi_V3_Components? = nil + var _security: [Openapi_V3_SecurityRequirement] = [] + var _tags: [Openapi_V3_Tag] = [] + var _externalDocs: Openapi_V3_ExternalDocs? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _openapi = source._openapi + _info = source._info + _servers = source._servers + _paths = source._paths + _components = source._components + _security = source._security + _tags = source._tags + _externalDocs = source._externalDocs + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Document) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._openapi != other_storage._openapi {return false} + if _storage._info != other_storage._info {return false} + if _storage._servers != other_storage._servers {return false} + if _storage._paths != other_storage._paths {return false} + if _storage._components != other_storage._components {return false} + if _storage._security != other_storage._security {return false} + if _storage._tags != other_storage._tags {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Encoding: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 
1: .standard(proto: "content_type"), + 2: .same(proto: "headers"), + 3: .same(proto: "style"), + 4: .same(proto: "explode"), + 5: .standard(proto: "allow_reserved"), + 6: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _contentType: String = String() + var _headers: Openapi_V3_HeadersOrReferences? = nil + var _style: String = String() + var _explode: Bool = false + var _allowReserved: Bool = false + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _contentType = source._contentType + _headers = source._headers + _style = source._style + _explode = source._explode + _allowReserved = source._allowReserved + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Encoding) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._contentType != other_storage._contentType {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._style != other_storage._style {return false} + if _storage._explode != other_storage._explode {return false} + if _storage._allowReserved != other_storage._allowReserved {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Encodings: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Encodings) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Example: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "summary"), + 2: .same(proto: "description"), + 3: .same(proto: "value"), + 4: .standard(proto: "external_value"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _summary: String = String() + var _description_p: String = String() + var _value: Openapi_V3_Any? 
= nil + var _externalValue: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _summary = source._summary + _description_p = source._description_p + _value = source._value + _externalValue = source._externalValue + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Example) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._value != other_storage._value {return false} + if _storage._externalValue != other_storage._externalValue {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ExampleOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "example"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_ExampleOrReference.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ExampleOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Examples: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap = SwiftProtobuf._NameMap() + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Examples) -> Bool { + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ExamplesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ExamplesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Expression: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func 
_protobuf_generated_isEqualTo(other: Openapi_V3_Expression) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ExternalDocs: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "url"), + 3: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ExternalDocs) -> Bool { + if self.description_p != other.description_p {return false} + if self.url != other.url {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Header: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "required"), + 3: .same(proto: "deprecated"), + 4: .standard(proto: "allow_empty_value"), + 5: .same(proto: "style"), + 6: .same(proto: "explode"), + 7: .standard(proto: "allow_reserved"), + 8: .same(proto: "schema"), + 9: .same(proto: "example"), + 10: .same(proto: "examples"), + 11: .same(proto: "content"), + 12: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _required: Bool = false + var _deprecated: Bool = false + var _allowEmptyValue: Bool = false + var _style: String = String() + var _explode: Bool = false + var _allowReserved: Bool = false + var _schema: Openapi_V3_SchemaOrReference? = nil + var _example: Openapi_V3_Any? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _content: Openapi_V3_MediaTypes? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _required = source._required + _deprecated = source._deprecated + _allowEmptyValue = source._allowEmptyValue + _style = source._style + _explode = source._explode + _allowReserved = source._allowReserved + _schema = source._schema + _example = source._example + _examples = source._examples + _content = source._content + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Header) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._required != other_storage._required {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._style != other_storage._style {return false} + if _storage._explode != other_storage._explode {return false} + if _storage._allowReserved != other_storage._allowReserved {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._example != other_storage._example {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._content != other_storage._content {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_HeaderOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "header"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_HeaderOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_HeaderOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_HeadersOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_HeadersOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Info: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "title"), + 2: .same(proto: "description"), + 3: .standard(proto: "terms_of_service"), + 4: .same(proto: "contact"), + 5: .same(proto: "license"), + 6: .same(proto: "version"), + 7: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _title: String = String() + var _description_p: String = String() + var _termsOfService: String = String() + var _contact: Openapi_V3_Contact? = nil + var _license: Openapi_V3_License? 
= nil + var _version: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _title = source._title + _description_p = source._description_p + _termsOfService = source._termsOfService + _contact = source._contact + _license = source._license + _version = source._version + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Info) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._termsOfService != other_storage._termsOfService {return false} + if _storage._contact != other_storage._contact {return false} + if _storage._license != other_storage._license {return false} + if _storage._version != other_storage._version {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ItemsItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "schema_or_reference"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ItemsItem) -> Bool { + if self.schemaOrReference != other.schemaOrReference {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_License: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "url"), + 3: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_License) -> Bool { + if self.name != other.name {return false} + if self.url != other.url {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Link: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "operation_ref"), + 2: .standard(proto: "operation_id"), + 3: .same(proto: "parameters"), + 4: .standard(proto: "request_body"), + 5: .same(proto: "description"), + 6: .same(proto: "server"), + 7: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _operationRef: String = String() + var _operationID: String = String() + var _parameters: Openapi_V3_AnysOrExpressions? = nil + var _requestBody: Openapi_V3_AnyOrExpression? = nil + var _description_p: String = String() + var _server: Openapi_V3_Server? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _operationRef = source._operationRef + _operationID = source._operationID + _parameters = source._parameters + _requestBody = source._requestBody + _description_p = source._description_p + _server = source._server + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Link) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._operationRef != other_storage._operationRef {return false} + if _storage._operationID != other_storage._operationID {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._requestBody != other_storage._requestBody {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._server != other_storage._server {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_LinkOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "link"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_LinkOrReference.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_LinkOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_LinksOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_LinksOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_MediaType: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .same(proto: "example"), + 3: .same(proto: "examples"), + 4: .same(proto: "encoding"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _schema: Openapi_V3_SchemaOrReference? 
= nil + var _example: Openapi_V3_Any? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _encoding: Openapi_V3_Encodings? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _schema = source._schema + _example = source._example + _examples = source._examples + _encoding = source._encoding + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_MediaType) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._schema != other_storage._schema {return false} + if _storage._example != other_storage._example {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._encoding != other_storage._encoding {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_MediaTypes: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_MediaTypes) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedAny: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_Any? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedAny) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedAnyOrExpression: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_AnyOrExpression? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedAnyOrExpression) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedCallbackOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_CallbackOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedCallbackOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedEncoding: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_Encoding? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedEncoding) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedExampleOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ExampleOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedExampleOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedHeaderOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_HeaderOrReference? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedHeaderOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedLinkOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_LinkOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedLinkOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedMediaType: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_MediaType? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedMediaType) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedParameterOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ParameterOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedParameterOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedPathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_PathItem? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedPathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedRequestBodyOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_RequestBodyOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedRequestBodyOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedResponseOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ResponseOrReference? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedResponseOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedSchemaOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_SchemaOrReference? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedSchemaOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedSecuritySchemeOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_SecuritySchemeOrReference? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedSecuritySchemeOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedServerVariable: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Openapi_V3_ServerVariable? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedServerVariable) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_NamedString: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_NamedString) -> Bool { + if self.name != other.name {return false} + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_OauthFlow: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "authorization_url"), + 2: .standard(proto: "token_url"), + 3: .standard(proto: "refresh_url"), + 4: .same(proto: "scopes"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _authorizationURL: String = String() + var _tokenURL: String = String() + var _refreshURL: String = String() + var _scopes: Openapi_V3_Strings? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _authorizationURL = source._authorizationURL + _tokenURL = source._tokenURL + _refreshURL = source._refreshURL + _scopes = source._scopes + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_OauthFlow) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._authorizationURL != other_storage._authorizationURL {return false} + if _storage._tokenURL != other_storage._tokenURL {return false} + if _storage._refreshURL != other_storage._refreshURL {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_OauthFlows: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "implicit"), + 2: .same(proto: "password"), + 3: .standard(proto: "client_credentials"), + 4: .standard(proto: "authorization_code"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _implicit: Openapi_V3_OauthFlow? = nil + var _password: Openapi_V3_OauthFlow? = nil + var _clientCredentials: Openapi_V3_OauthFlow? = nil + var _authorizationCode: Openapi_V3_OauthFlow? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _implicit = source._implicit + _password = source._password + _clientCredentials = source._clientCredentials + _authorizationCode = source._authorizationCode + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_OauthFlows) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._implicit != other_storage._implicit {return false} + if _storage._password != other_storage._password {return false} + if _storage._clientCredentials != other_storage._clientCredentials {return false} + if _storage._authorizationCode != other_storage._authorizationCode {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Object: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Object) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Operation: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "tags"), + 2: .same(proto: "summary"), + 3: .same(proto: "description"), + 4: .standard(proto: "external_docs"), + 5: .standard(proto: "operation_id"), + 6: .same(proto: "parameters"), + 7: .standard(proto: "request_body"), + 8: .same(proto: "responses"), + 9: .same(proto: "callbacks"), + 10: .same(proto: "deprecated"), + 11: .same(proto: "security"), + 12: .same(proto: "servers"), + 13: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _tags: [String] = [] + var _summary: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V3_ExternalDocs? = nil + var _operationID: String = String() + var _parameters: [Openapi_V3_ParameterOrReference] = [] + var _requestBody: Openapi_V3_RequestBodyOrReference? = nil + var _responses: Openapi_V3_Responses? = nil + var _callbacks: Openapi_V3_CallbacksOrReferences? 
= nil + var _deprecated: Bool = false + var _security: [Openapi_V3_SecurityRequirement] = [] + var _servers: [Openapi_V3_Server] = [] + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _tags = source._tags + _summary = source._summary + _description_p = source._description_p + _externalDocs = source._externalDocs + _operationID = source._operationID + _parameters = source._parameters + _requestBody = source._requestBody + _responses = source._responses + _callbacks = source._callbacks + _deprecated = source._deprecated + _security = source._security + _servers = source._servers + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Operation) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._tags != other_storage._tags {return false} + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._operationID != other_storage._operationID {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._requestBody != other_storage._requestBody {return false} + if _storage._responses != other_storage._responses {return false} + if _storage._callbacks != other_storage._callbacks {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._security != other_storage._security {return false} + if _storage._servers != other_storage._servers {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "in"), + 3: .same(proto: "description"), + 4: .same(proto: "required"), + 5: .same(proto: "deprecated"), + 6: .standard(proto: "allow_empty_value"), + 7: .same(proto: "style"), + 8: .same(proto: "explode"), + 9: .standard(proto: "allow_reserved"), + 10: .same(proto: "schema"), + 11: .same(proto: "example"), + 12: .same(proto: "examples"), + 13: .same(proto: "content"), + 14: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _in: String = String() + var _description_p: String = String() + var _required: Bool = false + var _deprecated: Bool = false + var _allowEmptyValue: Bool = false + var _style: String = String() + var _explode: Bool = false + var _allowReserved: Bool = false + var _schema: Openapi_V3_SchemaOrReference? = nil + var _example: Openapi_V3_Any? = nil + var _examples: Openapi_V3_ExamplesOrReferences? = nil + var _content: Openapi_V3_MediaTypes? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _in = source._in + _description_p = source._description_p + _required = source._required + _deprecated = source._deprecated + _allowEmptyValue = source._allowEmptyValue + _style = source._style + _explode = source._explode + _allowReserved = source._allowReserved + _schema = source._schema + _example = source._example + _examples = source._examples + _content = source._content + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Parameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._in != other_storage._in {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._required != other_storage._required {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._allowEmptyValue != other_storage._allowEmptyValue {return false} + if _storage._style != other_storage._style {return false} + if _storage._explode != other_storage._explode {return false} + if _storage._allowReserved != other_storage._allowReserved {return false} + if _storage._schema != other_storage._schema {return false} + if _storage._example != other_storage._example {return false} + if _storage._examples != other_storage._examples {return false} + if _storage._content != other_storage._content {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ParameterOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "parameter"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_ParameterOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ParameterOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ParametersOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ParametersOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_PathItem: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .same(proto: "summary"), + 3: .same(proto: "description"), + 4: .same(proto: "get"), + 5: .same(proto: "put"), + 6: .same(proto: "post"), + 7: .same(proto: "delete"), + 8: .same(proto: "options"), + 9: .same(proto: "head"), + 10: .same(proto: "patch"), + 11: .same(proto: "trace"), + 12: .same(proto: "servers"), + 13: .same(proto: "parameters"), + 14: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _ref: String = String() + var _summary: String = String() + var _description_p: String = String() + var _get: Openapi_V3_Operation? = nil + var _put: Openapi_V3_Operation? = nil + var _post: Openapi_V3_Operation? = nil + var _delete: Openapi_V3_Operation? = nil + var _options: Openapi_V3_Operation? = nil + var _head: Openapi_V3_Operation? = nil + var _patch: Openapi_V3_Operation? = nil + var _trace: Openapi_V3_Operation? 
= nil + var _servers: [Openapi_V3_Server] = [] + var _parameters: [Openapi_V3_ParameterOrReference] = [] + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _ref = source._ref + _summary = source._summary + _description_p = source._description_p + _get = source._get + _put = source._put + _post = source._post + _delete = source._delete + _options = source._options + _head = source._head + _patch = source._patch + _trace = source._trace + _servers = source._servers + _parameters = source._parameters + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_PathItem) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._ref != other_storage._ref {return false} + if _storage._summary != other_storage._summary {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._get != other_storage._get {return false} + if _storage._put != other_storage._put {return false} + if _storage._post != other_storage._post {return false} + if _storage._delete != other_storage._delete {return false} + if _storage._options != other_storage._options {return false} + if _storage._head != other_storage._head {return false} + if _storage._patch != other_storage._patch {return false} + if _storage._trace != other_storage._trace {return false} + if _storage._servers != other_storage._servers {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Paths: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "path"), + 2: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Paths) -> Bool { + if self.path != other.path {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Properties: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Properties) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Reference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Reference) -> Bool { + if self.ref != other.ref {return false} + if unknownFields != other.unknownFields {return false} + 
return true + } +} + +extension Openapi_V3_RequestBodiesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_RequestBodiesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_RequestBody: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "content"), + 3: .same(proto: "required"), + 4: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _content: Openapi_V3_MediaTypes? = nil + var _required: Bool = false + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _content = source._content + _required = source._required + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_RequestBody) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._content != other_storage._content {return false} + if _storage._required != other_storage._required {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_RequestBodyOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "request_body"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_RequestBodyOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_RequestBodyOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + 2: .same(proto: "headers"), + 3: .same(proto: "content"), + 4: .same(proto: "links"), + 5: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _description_p: String = String() + var _headers: Openapi_V3_HeadersOrReferences? = nil + var _content: Openapi_V3_MediaTypes? = nil + var _links: Openapi_V3_LinksOrReferences? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _description_p = source._description_p + _headers = source._headers + _content = source._content + _links = source._links + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Response) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._description_p != other_storage._description_p {return false} + if _storage._headers != other_storage._headers {return false} + if _storage._content != other_storage._content {return false} + if _storage._links != other_storage._links {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ResponseOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "response"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_ResponseOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ResponseOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Responses: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "default"), + 2: .standard(proto: "response_or_reference"), + 3: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _default: Openapi_V3_ResponseOrReference? = nil + var _responseOrReference: [Openapi_V3_NamedResponseOrReference] = [] + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _default = source._default + _responseOrReference = source._responseOrReference + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Responses) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._default != other_storage._default {return false} + if _storage._responseOrReference != other_storage._responseOrReference {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ResponsesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ResponsesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Schema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "nullable"), + 2: .same(proto: "discriminator"), + 3: .standard(proto: "read_only"), + 4: .standard(proto: "write_only"), + 5: .same(proto: "xml"), + 6: .standard(proto: "external_docs"), + 7: .same(proto: "example"), + 8: .same(proto: "deprecated"), + 9: .same(proto: "title"), + 10: .standard(proto: "multiple_of"), + 11: .same(proto: "maximum"), + 12: .standard(proto: "exclusive_maximum"), + 13: .same(proto: "minimum"), + 14: .standard(proto: "exclusive_minimum"), + 15: .standard(proto: 
"max_length"), + 16: .standard(proto: "min_length"), + 17: .same(proto: "pattern"), + 18: .standard(proto: "max_items"), + 19: .standard(proto: "min_items"), + 20: .standard(proto: "unique_items"), + 21: .standard(proto: "max_properties"), + 22: .standard(proto: "min_properties"), + 23: .same(proto: "required"), + 24: .same(proto: "enum"), + 25: .same(proto: "type"), + 26: .standard(proto: "all_of"), + 27: .standard(proto: "one_of"), + 28: .standard(proto: "any_of"), + 29: .same(proto: "not"), + 30: .same(proto: "items"), + 31: .same(proto: "properties"), + 32: .standard(proto: "additional_properties"), + 33: .same(proto: "default"), + 34: .same(proto: "description"), + 35: .same(proto: "format"), + 36: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _nullable: Bool = false + var _discriminator: Openapi_V3_Discriminator? = nil + var _readOnly: Bool = false + var _writeOnly: Bool = false + var _xml: Openapi_V3_Xml? = nil + var _externalDocs: Openapi_V3_ExternalDocs? = nil + var _example: Openapi_V3_Any? = nil + var _deprecated: Bool = false + var _title: String = String() + var _multipleOf: Double = 0 + var _maximum: Double = 0 + var _exclusiveMaximum: Bool = false + var _minimum: Double = 0 + var _exclusiveMinimum: Bool = false + var _maxLength: Int64 = 0 + var _minLength: Int64 = 0 + var _pattern: String = String() + var _maxItems: Int64 = 0 + var _minItems: Int64 = 0 + var _uniqueItems: Bool = false + var _maxProperties: Int64 = 0 + var _minProperties: Int64 = 0 + var _required: [String] = [] + var _enum: [Openapi_V3_Any] = [] + var _type: String = String() + var _allOf: [Openapi_V3_SchemaOrReference] = [] + var _oneOf: [Openapi_V3_SchemaOrReference] = [] + var _anyOf: [Openapi_V3_SchemaOrReference] = [] + var _not: Openapi_V3_Schema? = nil + var _items: Openapi_V3_ItemsItem? = nil + var _properties: Openapi_V3_Properties? = nil + var _additionalProperties: Openapi_V3_AdditionalPropertiesItem? = nil + var _default: Openapi_V3_DefaultType? 
= nil + var _description_p: String = String() + var _format: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _nullable = source._nullable + _discriminator = source._discriminator + _readOnly = source._readOnly + _writeOnly = source._writeOnly + _xml = source._xml + _externalDocs = source._externalDocs + _example = source._example + _deprecated = source._deprecated + _title = source._title + _multipleOf = source._multipleOf + _maximum = source._maximum + _exclusiveMaximum = source._exclusiveMaximum + _minimum = source._minimum + _exclusiveMinimum = source._exclusiveMinimum + _maxLength = source._maxLength + _minLength = source._minLength + _pattern = source._pattern + _maxItems = source._maxItems + _minItems = source._minItems + _uniqueItems = source._uniqueItems + _maxProperties = source._maxProperties + _minProperties = source._minProperties + _required = source._required + _enum = source._enum + _type = source._type + _allOf = source._allOf + _oneOf = source._oneOf + _anyOf = source._anyOf + _not = source._not + _items = source._items + _properties = source._properties + _additionalProperties = source._additionalProperties + _default = source._default + _description_p = source._description_p + _format = source._format + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Schema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._nullable != other_storage._nullable {return false} + if _storage._discriminator != other_storage._discriminator {return false} + if _storage._readOnly != other_storage._readOnly {return false} + if _storage._writeOnly != other_storage._writeOnly {return false} + if _storage._xml != other_storage._xml {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._example != other_storage._example {return false} + if _storage._deprecated != other_storage._deprecated {return false} + if _storage._title != other_storage._title {return false} + if _storage._multipleOf != other_storage._multipleOf {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._exclusiveMaximum != other_storage._exclusiveMaximum {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._exclusiveMinimum != other_storage._exclusiveMinimum {return false} + if _storage._maxLength != other_storage._maxLength {return false} + if _storage._minLength != other_storage._minLength {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._maxItems != other_storage._maxItems {return false} + if _storage._minItems != other_storage._minItems {return false} + if _storage._uniqueItems != other_storage._uniqueItems {return false} + if _storage._maxProperties != other_storage._maxProperties {return false} + if _storage._minProperties != other_storage._minProperties {return false} + if _storage._required != other_storage._required {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._type != other_storage._type {return 
false} + if _storage._allOf != other_storage._allOf {return false} + if _storage._oneOf != other_storage._oneOf {return false} + if _storage._anyOf != other_storage._anyOf {return false} + if _storage._not != other_storage._not {return false} + if _storage._items != other_storage._items {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._default != other_storage._default {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._format != other_storage._format {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SchemaOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "schema"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_SchemaOrReference.OneOf_Oneof? + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SchemaOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SchemasOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SchemasOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecurityRequirement: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap = SwiftProtobuf._NameMap() + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecurityRequirement) -> Bool { + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecurityScheme: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "type"), + 2: .same(proto: "description"), + 3: .same(proto: "name"), + 4: .same(proto: "in"), + 5: .same(proto: "scheme"), + 6: .standard(proto: "bearer_format"), + 7: .same(proto: "flows"), + 8: .standard(proto: "open_id_connect_url"), + 9: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _type: String = String() + var _description_p: String = String() + var _name: String = String() + var _in: String = String() + var _scheme: String = 
String() + var _bearerFormat: String = String() + var _flows: Openapi_V3_OauthFlows? = nil + var _openIDConnectURL: String = String() + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _type = source._type + _description_p = source._description_p + _name = source._name + _in = source._in + _scheme = source._scheme + _bearerFormat = source._bearerFormat + _flows = source._flows + _openIDConnectURL = source._openIDConnectURL + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecurityScheme) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._type != other_storage._type {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._name != other_storage._name {return false} + if _storage._in != other_storage._in {return false} + if _storage._scheme != other_storage._scheme {return false} + if _storage._bearerFormat != other_storage._bearerFormat {return false} + if _storage._flows != other_storage._flows {return false} + if _storage._openIDConnectURL != other_storage._openIDConnectURL {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecuritySchemeOrReference: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "security_scheme"), + 2: .same(proto: "reference"), + ] + + fileprivate class _StorageClass { + var _oneof: Openapi_V3_SecuritySchemeOrReference.OneOf_Oneof? 
+ + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oneof = source._oneof + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecuritySchemeOrReference) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oneof != other_storage._oneof {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SecuritySchemesOrReferences: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SecuritySchemesOrReferences) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Server: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "url"), + 2: .same(proto: "description"), + 3: .same(proto: "variables"), + 4: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _url: String = String() + var _description_p: String = String() + var _variables: Openapi_V3_ServerVariables? = nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _url = source._url + _description_p = source._description_p + _variables = source._variables + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Server) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._url != other_storage._url {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._variables != other_storage._variables {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ServerVariable: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "enum"), + 2: .same(proto: "default"), + 3: .same(proto: "description"), + 4: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ServerVariable) -> Bool { + if self.`enum` != other.`enum` {return false} + if self.`default` != other.`default` {return false} + if self.description_p != other.description_p {return false} + if self.specificationExtension 
!= other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_ServerVariables: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_ServerVariables) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_SpecificationExtension: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "number"), + 2: .same(proto: "boolean"), + 3: .same(proto: "string"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_SpecificationExtension) -> Bool { + if self.oneof != other.oneof {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_StringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_StringArray) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Strings: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Strings) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Tag: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "description"), + 3: .standard(proto: "external_docs"), + 4: .standard(proto: "specification_extension"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _description_p: String = String() + var _externalDocs: Openapi_V3_ExternalDocs? 
= nil + var _specificationExtension: [Openapi_V3_NamedAny] = [] + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _description_p = source._description_p + _externalDocs = source._externalDocs + _specificationExtension = source._specificationExtension + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Tag) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._externalDocs != other_storage._externalDocs {return false} + if _storage._specificationExtension != other_storage._specificationExtension {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Openapi_V3_Xml: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "namespace"), + 3: .same(proto: "prefix"), + 4: .same(proto: "attribute"), + 5: .same(proto: "wrapped"), + 6: .standard(proto: "specification_extension"), + ] + + public func _protobuf_generated_isEqualTo(other: Openapi_V3_Xml) -> Bool { + if self.name != other.name {return false} + if self.namespace != other.namespace {return false} + if self.prefix != other.prefix {return false} + if self.attribute != other.attribute {return false} + if self.wrapped != other.wrapped {return false} + if self.specificationExtension != other.specificationExtension {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/discovery.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/discovery.pb.swift new file mode 100644 index 000000000..dcca2ce0c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/discovery.pb.swift @@ -0,0 +1,3195 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/discovery/discovery.proto +// +// For information on using the generated types, please see the documenation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. 
+ +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that your are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public struct Discovery_V1_Annotations: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Annotations" + + public var required: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.required) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.required.isEmpty { + try visitor.visitRepeatedStringField(value: self.required, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Any: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Any" + + public var value: SwiftProtobuf.Google_Protobuf_Any { + get {return _storage._value ?? SwiftProtobuf.Google_Protobuf_Any()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var yaml: String { + get {return _storage._yaml} + set {_uniqueStorage()._yaml = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._value) + case 2: try decoder.decodeSingularStringField(value: &_storage._yaml) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if !_storage._yaml.isEmpty { + try visitor.visitSingularStringField(value: _storage._yaml, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Auth: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Auth" + + public var oauth2: Discovery_V1_Oauth2 { + get {return _storage._oauth2 ?? Discovery_V1_Oauth2()} + set {_uniqueStorage()._oauth2 = newValue} + } + /// Returns true if `oauth2` has been explicitly set. + public var hasOauth2: Bool {return _storage._oauth2 != nil} + /// Clears the value of `oauth2`. Subsequent reads from it will return its default value. + public mutating func clearOauth2() {_storage._oauth2 = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._oauth2) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._oauth2 { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Document: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Document" + + public var kind: String { + get {return _storage._kind} + set {_uniqueStorage()._kind = newValue} + } + + public var discoveryVersion: String { + get {return _storage._discoveryVersion} + set {_uniqueStorage()._discoveryVersion = newValue} + } + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + public var version: String { + get {return _storage._version} + set {_uniqueStorage()._version = newValue} + } + + public var revision: String { + get {return _storage._revision} + set {_uniqueStorage()._revision = newValue} + } + + public var title: String { + get {return _storage._title} + set {_uniqueStorage()._title = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var icons: Discovery_V1_Icons { + get {return _storage._icons ?? Discovery_V1_Icons()} + set {_uniqueStorage()._icons = newValue} + } + /// Returns true if `icons` has been explicitly set. + public var hasIcons: Bool {return _storage._icons != nil} + /// Clears the value of `icons`. Subsequent reads from it will return its default value. + public mutating func clearIcons() {_storage._icons = nil} + + public var documentationLink: String { + get {return _storage._documentationLink} + set {_uniqueStorage()._documentationLink = newValue} + } + + public var labels: [String] { + get {return _storage._labels} + set {_uniqueStorage()._labels = newValue} + } + + public var `protocol`: String { + get {return _storage._protocol} + set {_uniqueStorage()._protocol = newValue} + } + + public var baseURL: String { + get {return _storage._baseURL} + set {_uniqueStorage()._baseURL = newValue} + } + + public var basePath: String { + get {return _storage._basePath} + set {_uniqueStorage()._basePath = newValue} + } + + public var rootURL: String { + get {return _storage._rootURL} + set {_uniqueStorage()._rootURL = newValue} + } + + public var servicePath: String { + get {return _storage._servicePath} + set {_uniqueStorage()._servicePath = newValue} + } + + public var batchPath: String { + get {return _storage._batchPath} + set {_uniqueStorage()._batchPath = newValue} + } + + public var parameters: Discovery_V1_Parameters { + get {return _storage._parameters ?? Discovery_V1_Parameters()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var auth: Discovery_V1_Auth { + get {return _storage._auth ?? Discovery_V1_Auth()} + set {_uniqueStorage()._auth = newValue} + } + /// Returns true if `auth` has been explicitly set. + public var hasAuth: Bool {return _storage._auth != nil} + /// Clears the value of `auth`. 
Subsequent reads from it will return its default value. + public mutating func clearAuth() {_storage._auth = nil} + + public var features: [String] { + get {return _storage._features} + set {_uniqueStorage()._features = newValue} + } + + public var schemas: Discovery_V1_Schemas { + get {return _storage._schemas ?? Discovery_V1_Schemas()} + set {_uniqueStorage()._schemas = newValue} + } + /// Returns true if `schemas` has been explicitly set. + public var hasSchemas: Bool {return _storage._schemas != nil} + /// Clears the value of `schemas`. Subsequent reads from it will return its default value. + public mutating func clearSchemas() {_storage._schemas = nil} + + public var methods: Discovery_V1_Methods { + get {return _storage._methods ?? Discovery_V1_Methods()} + set {_uniqueStorage()._methods = newValue} + } + /// Returns true if `methods` has been explicitly set. + public var hasMethods: Bool {return _storage._methods != nil} + /// Clears the value of `methods`. Subsequent reads from it will return its default value. + public mutating func clearMethods() {_storage._methods = nil} + + public var resources: Discovery_V1_Resources { + get {return _storage._resources ?? Discovery_V1_Resources()} + set {_uniqueStorage()._resources = newValue} + } + /// Returns true if `resources` has been explicitly set. + public var hasResources: Bool {return _storage._resources != nil} + /// Clears the value of `resources`. Subsequent reads from it will return its default value. + public mutating func clearResources() {_storage._resources = nil} + + public var etag: String { + get {return _storage._etag} + set {_uniqueStorage()._etag = newValue} + } + + public var ownerDomain: String { + get {return _storage._ownerDomain} + set {_uniqueStorage()._ownerDomain = newValue} + } + + public var ownerName: String { + get {return _storage._ownerName} + set {_uniqueStorage()._ownerName = newValue} + } + + public var versionModule: Bool { + get {return _storage._versionModule} + set {_uniqueStorage()._versionModule = newValue} + } + + public var canonicalName: String { + get {return _storage._canonicalName} + set {_uniqueStorage()._canonicalName = newValue} + } + + public var fullyEncodeReservedExpansion: Bool { + get {return _storage._fullyEncodeReservedExpansion} + set {_uniqueStorage()._fullyEncodeReservedExpansion = newValue} + } + + public var packagePath: String { + get {return _storage._packagePath} + set {_uniqueStorage()._packagePath = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._kind) + case 2: try decoder.decodeSingularStringField(value: &_storage._discoveryVersion) + case 3: try decoder.decodeSingularStringField(value: &_storage._id) + case 4: try decoder.decodeSingularStringField(value: &_storage._name) + case 5: try decoder.decodeSingularStringField(value: &_storage._version) + case 6: try decoder.decodeSingularStringField(value: &_storage._revision) + case 7: try decoder.decodeSingularStringField(value: &_storage._title) + case 8: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 9: try decoder.decodeSingularMessageField(value: &_storage._icons) + case 10: try decoder.decodeSingularStringField(value: &_storage._documentationLink) + case 11: try decoder.decodeRepeatedStringField(value: &_storage._labels) + case 12: try decoder.decodeSingularStringField(value: &_storage._protocol) + case 13: try decoder.decodeSingularStringField(value: &_storage._baseURL) + case 14: try decoder.decodeSingularStringField(value: &_storage._basePath) + case 15: try decoder.decodeSingularStringField(value: &_storage._rootURL) + case 16: try decoder.decodeSingularStringField(value: &_storage._servicePath) + case 17: try decoder.decodeSingularStringField(value: &_storage._batchPath) + case 18: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 19: try decoder.decodeSingularMessageField(value: &_storage._auth) + case 20: try decoder.decodeRepeatedStringField(value: &_storage._features) + case 21: try decoder.decodeSingularMessageField(value: &_storage._schemas) + case 22: try decoder.decodeSingularMessageField(value: &_storage._methods) + case 23: try decoder.decodeSingularMessageField(value: &_storage._resources) + case 24: try decoder.decodeSingularStringField(value: &_storage._etag) + case 25: try decoder.decodeSingularStringField(value: &_storage._ownerDomain) + case 26: try decoder.decodeSingularStringField(value: &_storage._ownerName) + case 27: try decoder.decodeSingularBoolField(value: &_storage._versionModule) + case 28: try decoder.decodeSingularStringField(value: &_storage._canonicalName) + case 29: try decoder.decodeSingularBoolField(value: &_storage._fullyEncodeReservedExpansion) + case 30: try decoder.decodeSingularStringField(value: &_storage._packagePath) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._kind.isEmpty { + try visitor.visitSingularStringField(value: _storage._kind, fieldNumber: 1) + } + if !_storage._discoveryVersion.isEmpty { + try visitor.visitSingularStringField(value: _storage._discoveryVersion, fieldNumber: 2) + } + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 3) + } + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 4) + } + if !_storage._version.isEmpty { + try visitor.visitSingularStringField(value: _storage._version, fieldNumber: 5) + } + if !_storage._revision.isEmpty { + try visitor.visitSingularStringField(value: _storage._revision, fieldNumber: 6) + } + if !_storage._title.isEmpty { + try visitor.visitSingularStringField(value: _storage._title, fieldNumber: 7) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 8) + } + if let v = _storage._icons { + try visitor.visitSingularMessageField(value: v, fieldNumber: 9) + } + if !_storage._documentationLink.isEmpty { + try visitor.visitSingularStringField(value: _storage._documentationLink, fieldNumber: 10) + } + if !_storage._labels.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._labels, fieldNumber: 11) + } + if !_storage._protocol.isEmpty { + try visitor.visitSingularStringField(value: _storage._protocol, fieldNumber: 12) + } + if !_storage._baseURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._baseURL, fieldNumber: 13) + } + if !_storage._basePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._basePath, fieldNumber: 14) + } + if !_storage._rootURL.isEmpty { + try visitor.visitSingularStringField(value: _storage._rootURL, fieldNumber: 15) + } + if !_storage._servicePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._servicePath, fieldNumber: 16) + } + if !_storage._batchPath.isEmpty { + try visitor.visitSingularStringField(value: _storage._batchPath, fieldNumber: 17) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 18) + } + if let v = _storage._auth { + try visitor.visitSingularMessageField(value: v, fieldNumber: 19) + } + if !_storage._features.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._features, fieldNumber: 20) + } + if let v = _storage._schemas { + try visitor.visitSingularMessageField(value: v, fieldNumber: 21) + } + if let v = _storage._methods { + try visitor.visitSingularMessageField(value: v, fieldNumber: 22) + } + if let v = _storage._resources { + try visitor.visitSingularMessageField(value: v, fieldNumber: 23) + } + if !_storage._etag.isEmpty { + try visitor.visitSingularStringField(value: _storage._etag, fieldNumber: 24) + } + if !_storage._ownerDomain.isEmpty { + try visitor.visitSingularStringField(value: _storage._ownerDomain, fieldNumber: 25) + } + if !_storage._ownerName.isEmpty { + try visitor.visitSingularStringField(value: _storage._ownerName, fieldNumber: 26) + } + if _storage._versionModule != false { + try visitor.visitSingularBoolField(value: _storage._versionModule, fieldNumber: 27) + } + if !_storage._canonicalName.isEmpty { + try visitor.visitSingularStringField(value: _storage._canonicalName, fieldNumber: 28) + } + if _storage._fullyEncodeReservedExpansion != false { + try visitor.visitSingularBoolField(value: 
_storage._fullyEncodeReservedExpansion, fieldNumber: 29) + } + if !_storage._packagePath.isEmpty { + try visitor.visitSingularStringField(value: _storage._packagePath, fieldNumber: 30) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Icons that represent the API. +public struct Discovery_V1_Icons: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Icons" + + public var x16: String = String() + + public var x32: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.x16) + case 2: try decoder.decodeSingularStringField(value: &self.x32) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.x16.isEmpty { + try visitor.visitSingularStringField(value: self.x16, fieldNumber: 1) + } + if !self.x32.isEmpty { + try visitor.visitSingularStringField(value: self.x32, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_MediaUpload: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".MediaUpload" + + public var accept: [String] { + get {return _storage._accept} + set {_uniqueStorage()._accept = newValue} + } + + public var maxSize: String { + get {return _storage._maxSize} + set {_uniqueStorage()._maxSize = newValue} + } + + public var protocols: Discovery_V1_Protocols { + get {return _storage._protocols ?? Discovery_V1_Protocols()} + set {_uniqueStorage()._protocols = newValue} + } + /// Returns true if `protocols` has been explicitly set. + public var hasProtocols: Bool {return _storage._protocols != nil} + /// Clears the value of `protocols`. Subsequent reads from it will return its default value. + public mutating func clearProtocols() {_storage._protocols = nil} + + public var supportsSubscription: Bool { + get {return _storage._supportsSubscription} + set {_uniqueStorage()._supportsSubscription = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &_storage._accept) + case 2: try decoder.decodeSingularStringField(value: &_storage._maxSize) + case 3: try decoder.decodeSingularMessageField(value: &_storage._protocols) + case 4: try decoder.decodeSingularBoolField(value: &_storage._supportsSubscription) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._accept.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._accept, fieldNumber: 1) + } + if !_storage._maxSize.isEmpty { + try visitor.visitSingularStringField(value: _storage._maxSize, fieldNumber: 2) + } + if let v = _storage._protocols { + try visitor.visitSingularMessageField(value: v, fieldNumber: 3) + } + if _storage._supportsSubscription != false { + try visitor.visitSingularBoolField(value: _storage._supportsSubscription, fieldNumber: 4) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Method: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Method" + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var path: String { + get {return _storage._path} + set {_uniqueStorage()._path = newValue} + } + + public var httpMethod: String { + get {return _storage._httpMethod} + set {_uniqueStorage()._httpMethod = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var parameters: Discovery_V1_Parameters { + get {return _storage._parameters ?? Discovery_V1_Parameters()} + set {_uniqueStorage()._parameters = newValue} + } + /// Returns true if `parameters` has been explicitly set. + public var hasParameters: Bool {return _storage._parameters != nil} + /// Clears the value of `parameters`. Subsequent reads from it will return its default value. + public mutating func clearParameters() {_storage._parameters = nil} + + public var parameterOrder: [String] { + get {return _storage._parameterOrder} + set {_uniqueStorage()._parameterOrder = newValue} + } + + public var request: Discovery_V1_Request { + get {return _storage._request ?? Discovery_V1_Request()} + set {_uniqueStorage()._request = newValue} + } + /// Returns true if `request` has been explicitly set. + public var hasRequest: Bool {return _storage._request != nil} + /// Clears the value of `request`. Subsequent reads from it will return its default value. + public mutating func clearRequest() {_storage._request = nil} + + public var response: Discovery_V1_Response { + get {return _storage._response ?? Discovery_V1_Response()} + set {_uniqueStorage()._response = newValue} + } + /// Returns true if `response` has been explicitly set. + public var hasResponse: Bool {return _storage._response != nil} + /// Clears the value of `response`. 
Subsequent reads from it will return its default value. + public mutating func clearResponse() {_storage._response = nil} + + public var scopes: [String] { + get {return _storage._scopes} + set {_uniqueStorage()._scopes = newValue} + } + + public var supportsMediaDownload: Bool { + get {return _storage._supportsMediaDownload} + set {_uniqueStorage()._supportsMediaDownload = newValue} + } + + public var supportsMediaUpload: Bool { + get {return _storage._supportsMediaUpload} + set {_uniqueStorage()._supportsMediaUpload = newValue} + } + + public var useMediaDownloadService: Bool { + get {return _storage._useMediaDownloadService} + set {_uniqueStorage()._useMediaDownloadService = newValue} + } + + public var mediaUpload: Discovery_V1_MediaUpload { + get {return _storage._mediaUpload ?? Discovery_V1_MediaUpload()} + set {_uniqueStorage()._mediaUpload = newValue} + } + /// Returns true if `mediaUpload` has been explicitly set. + public var hasMediaUpload: Bool {return _storage._mediaUpload != nil} + /// Clears the value of `mediaUpload`. Subsequent reads from it will return its default value. + public mutating func clearMediaUpload() {_storage._mediaUpload = nil} + + public var supportsSubscription: Bool { + get {return _storage._supportsSubscription} + set {_uniqueStorage()._supportsSubscription = newValue} + } + + public var flatPath: String { + get {return _storage._flatPath} + set {_uniqueStorage()._flatPath = newValue} + } + + public var etagRequired: Bool { + get {return _storage._etagRequired} + set {_uniqueStorage()._etagRequired = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._id) + case 2: try decoder.decodeSingularStringField(value: &_storage._path) + case 3: try decoder.decodeSingularStringField(value: &_storage._httpMethod) + case 4: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 5: try decoder.decodeSingularMessageField(value: &_storage._parameters) + case 6: try decoder.decodeRepeatedStringField(value: &_storage._parameterOrder) + case 7: try decoder.decodeSingularMessageField(value: &_storage._request) + case 8: try decoder.decodeSingularMessageField(value: &_storage._response) + case 9: try decoder.decodeRepeatedStringField(value: &_storage._scopes) + case 10: try decoder.decodeSingularBoolField(value: &_storage._supportsMediaDownload) + case 11: try decoder.decodeSingularBoolField(value: &_storage._supportsMediaUpload) + case 12: try decoder.decodeSingularBoolField(value: &_storage._useMediaDownloadService) + case 13: try decoder.decodeSingularMessageField(value: &_storage._mediaUpload) + case 14: try decoder.decodeSingularBoolField(value: &_storage._supportsSubscription) + case 15: try decoder.decodeSingularStringField(value: &_storage._flatPath) + case 16: try decoder.decodeSingularBoolField(value: &_storage._etagRequired) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 1) + } + if !_storage._path.isEmpty { + try visitor.visitSingularStringField(value: _storage._path, fieldNumber: 2) + } + if !_storage._httpMethod.isEmpty { + try visitor.visitSingularStringField(value: _storage._httpMethod, fieldNumber: 3) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 4) + } + if let v = _storage._parameters { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if !_storage._parameterOrder.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._parameterOrder, fieldNumber: 6) + } + if let v = _storage._request { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._response { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + if !_storage._scopes.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._scopes, fieldNumber: 9) + } + if _storage._supportsMediaDownload != false { + try visitor.visitSingularBoolField(value: _storage._supportsMediaDownload, fieldNumber: 10) + } + if _storage._supportsMediaUpload != false { + try visitor.visitSingularBoolField(value: _storage._supportsMediaUpload, fieldNumber: 11) + } + if _storage._useMediaDownloadService != false { + try visitor.visitSingularBoolField(value: _storage._useMediaDownloadService, fieldNumber: 12) + } + if let v = _storage._mediaUpload { + try visitor.visitSingularMessageField(value: v, fieldNumber: 13) + } + if _storage._supportsSubscription != false { + try visitor.visitSingularBoolField(value: _storage._supportsSubscription, fieldNumber: 14) + } + if !_storage._flatPath.isEmpty { + try visitor.visitSingularStringField(value: _storage._flatPath, fieldNumber: 15) + } + if _storage._etagRequired != false { + try visitor.visitSingularBoolField(value: _storage._etagRequired, fieldNumber: 16) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Methods: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Methods" + + public var additionalProperties: [Discovery_V1_NamedMethod] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Automatically-generated message used to represent maps of Method as ordered (name,value) pairs. +public struct Discovery_V1_NamedMethod: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedMethod" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Method { + get {return _storage._value ?? Discovery_V1_Method()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +public struct Discovery_V1_NamedParameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedParameter" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Parameter { + get {return _storage._value ?? Discovery_V1_Parameter()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Resource as ordered (name,value) pairs. +public struct Discovery_V1_NamedResource: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedResource" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Resource { + get {return _storage._value ?? Discovery_V1_Resource()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +public struct Discovery_V1_NamedSchema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedSchema" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Schema { + get {return _storage._value ?? Discovery_V1_Schema()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. + public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// Automatically-generated message used to represent maps of Scope as ordered (name,value) pairs. +public struct Discovery_V1_NamedScope: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".NamedScope" + + /// Map key + public var name: String { + get {return _storage._name} + set {_uniqueStorage()._name = newValue} + } + + /// Mapped value + public var value: Discovery_V1_Scope { + get {return _storage._value ?? Discovery_V1_Scope()} + set {_uniqueStorage()._value = newValue} + } + /// Returns true if `value` has been explicitly set. + public var hasValue: Bool {return _storage._value != nil} + /// Clears the value of `value`. Subsequent reads from it will return its default value. 
+ public mutating func clearValue() {_storage._value = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._name) + case 2: try decoder.decodeSingularMessageField(value: &_storage._value) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._name.isEmpty { + try visitor.visitSingularStringField(value: _storage._name, fieldNumber: 1) + } + if let v = _storage._value { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Oauth2: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Oauth2" + + public var scopes: Discovery_V1_Scopes { + get {return _storage._scopes ?? Discovery_V1_Scopes()} + set {_uniqueStorage()._scopes = newValue} + } + /// Returns true if `scopes` has been explicitly set. + public var hasScopes: Bool {return _storage._scopes != nil} + /// Clears the value of `scopes`. Subsequent reads from it will return its default value. + public mutating func clearScopes() {_storage._scopes = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._scopes) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._scopes { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: String { + get {return _storage._default} + set {_uniqueStorage()._default = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var minimum: String { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var maximum: String { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var `enum`: [String] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var enumDescriptions: [String] { + get {return _storage._enumDescriptions} + set {_uniqueStorage()._enumDescriptions = newValue} + } + + public var repeated: Bool { + get {return _storage._repeated} + set {_uniqueStorage()._repeated = newValue} + } + + public var location: String { + get {return _storage._location} + set {_uniqueStorage()._location = newValue} + } + + public var properties: Discovery_V1_Schemas { + get {return _storage._properties ?? Discovery_V1_Schemas()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var additionalProperties: Discovery_V1_Schema { + get {return _storage._additionalProperties ?? Discovery_V1_Schema()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. + public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var items: Discovery_V1_Schema { + get {return _storage._items ?? Discovery_V1_Schema()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. 
+ public mutating func clearItems() {_storage._items = nil} + + public var annotations: Discovery_V1_Annotations { + get {return _storage._annotations ?? Discovery_V1_Annotations()} + set {_uniqueStorage()._annotations = newValue} + } + /// Returns true if `annotations` has been explicitly set. + public var hasAnnotations: Bool {return _storage._annotations != nil} + /// Clears the value of `annotations`. Subsequent reads from it will return its default value. + public mutating func clearAnnotations() {_storage._annotations = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._id) + case 2: try decoder.decodeSingularStringField(value: &_storage._type) + case 3: try decoder.decodeSingularStringField(value: &_storage._ref) + case 4: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 5: try decoder.decodeSingularStringField(value: &_storage._default) + case 6: try decoder.decodeSingularBoolField(value: &_storage._required) + case 7: try decoder.decodeSingularStringField(value: &_storage._format) + case 8: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 9: try decoder.decodeSingularStringField(value: &_storage._minimum) + case 10: try decoder.decodeSingularStringField(value: &_storage._maximum) + case 11: try decoder.decodeRepeatedStringField(value: &_storage._enum) + case 12: try decoder.decodeRepeatedStringField(value: &_storage._enumDescriptions) + case 13: try decoder.decodeSingularBoolField(value: &_storage._repeated) + case 14: try decoder.decodeSingularStringField(value: &_storage._location) + case 15: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 16: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 17: try decoder.decodeSingularMessageField(value: &_storage._items) + case 18: try decoder.decodeSingularMessageField(value: &_storage._annotations) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 1) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 2) + } + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 3) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 4) + } + if !_storage._default.isEmpty { + try visitor.visitSingularStringField(value: _storage._default, fieldNumber: 5) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 6) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 7) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 8) + } + if !_storage._minimum.isEmpty { + try visitor.visitSingularStringField(value: _storage._minimum, fieldNumber: 9) + } + if !_storage._maximum.isEmpty { + try visitor.visitSingularStringField(value: _storage._maximum, fieldNumber: 10) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enum, fieldNumber: 11) + } + if !_storage._enumDescriptions.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enumDescriptions, fieldNumber: 12) + } + if _storage._repeated != false { + try visitor.visitSingularBoolField(value: _storage._repeated, fieldNumber: 13) + } + if !_storage._location.isEmpty { + try visitor.visitSingularStringField(value: _storage._location, fieldNumber: 14) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 15) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 16) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 17) + } + if let v = _storage._annotations { + try visitor.visitSingularMessageField(value: v, fieldNumber: 18) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Parameters: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameters" + + public var additionalProperties: [Discovery_V1_NamedParameter] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Protocols: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Protocols" + + public var simple: Discovery_V1_Simple { + get {return _storage._simple ?? Discovery_V1_Simple()} + set {_uniqueStorage()._simple = newValue} + } + /// Returns true if `simple` has been explicitly set. + public var hasSimple: Bool {return _storage._simple != nil} + /// Clears the value of `simple`. Subsequent reads from it will return its default value. + public mutating func clearSimple() {_storage._simple = nil} + + public var resumable: Discovery_V1_Resumable { + get {return _storage._resumable ?? Discovery_V1_Resumable()} + set {_uniqueStorage()._resumable = newValue} + } + /// Returns true if `resumable` has been explicitly set. + public var hasResumable: Bool {return _storage._resumable != nil} + /// Clears the value of `resumable`. Subsequent reads from it will return its default value. + public mutating func clearResumable() {_storage._resumable = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._simple) + case 2: try decoder.decodeSingularMessageField(value: &_storage._resumable) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._simple { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._resumable { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Request: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Request" + + public var ref: String = String() + + public var parameterName: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + case 2: try decoder.decodeSingularStringField(value: &self.parameterName) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + if !self.parameterName.isEmpty { + try visitor.visitSingularStringField(value: self.parameterName, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Resource: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Resource" + + public var methods: Discovery_V1_Methods { + get {return _storage._methods ?? Discovery_V1_Methods()} + set {_uniqueStorage()._methods = newValue} + } + /// Returns true if `methods` has been explicitly set. + public var hasMethods: Bool {return _storage._methods != nil} + /// Clears the value of `methods`. Subsequent reads from it will return its default value. + public mutating func clearMethods() {_storage._methods = nil} + + public var resources: Discovery_V1_Resources { + get {return _storage._resources ?? Discovery_V1_Resources()} + set {_uniqueStorage()._resources = newValue} + } + /// Returns true if `resources` has been explicitly set. + public var hasResources: Bool {return _storage._resources != nil} + /// Clears the value of `resources`. Subsequent reads from it will return its default value. + public mutating func clearResources() {_storage._resources = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularMessageField(value: &_storage._methods) + case 2: try decoder.decodeSingularMessageField(value: &_storage._resources) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if let v = _storage._methods { + try visitor.visitSingularMessageField(value: v, fieldNumber: 1) + } + if let v = _storage._resources { + try visitor.visitSingularMessageField(value: v, fieldNumber: 2) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Resources: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Resources" + + public var additionalProperties: [Discovery_V1_NamedResource] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + public var ref: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.ref) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.ref.isEmpty { + try visitor.visitSingularStringField(value: self.ref, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Resumable: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Resumable" + + public var multipart: Bool = false + + public var path: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. 
`init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &self.multipart) + case 2: try decoder.decodeSingularStringField(value: &self.path) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if self.multipart != false { + try visitor.visitSingularBoolField(value: self.multipart, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitSingularStringField(value: self.path, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Schema: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schema" + + public var id: String { + get {return _storage._id} + set {_uniqueStorage()._id = newValue} + } + + public var type: String { + get {return _storage._type} + set {_uniqueStorage()._type = newValue} + } + + public var description_p: String { + get {return _storage._description_p} + set {_uniqueStorage()._description_p = newValue} + } + + public var `default`: String { + get {return _storage._default} + set {_uniqueStorage()._default = newValue} + } + + public var required: Bool { + get {return _storage._required} + set {_uniqueStorage()._required = newValue} + } + + public var format: String { + get {return _storage._format} + set {_uniqueStorage()._format = newValue} + } + + public var pattern: String { + get {return _storage._pattern} + set {_uniqueStorage()._pattern = newValue} + } + + public var minimum: String { + get {return _storage._minimum} + set {_uniqueStorage()._minimum = newValue} + } + + public var maximum: String { + get {return _storage._maximum} + set {_uniqueStorage()._maximum = newValue} + } + + public var `enum`: [String] { + get {return _storage._enum} + set {_uniqueStorage()._enum = newValue} + } + + public var enumDescriptions: [String] { + get {return _storage._enumDescriptions} + set {_uniqueStorage()._enumDescriptions = newValue} + } + + public var repeated: Bool { + get {return _storage._repeated} + set {_uniqueStorage()._repeated = newValue} + } + + public var location: String { + get {return _storage._location} + set {_uniqueStorage()._location = newValue} + } + + public var properties: Discovery_V1_Schemas { + get {return _storage._properties ?? Discovery_V1_Schemas()} + set {_uniqueStorage()._properties = newValue} + } + /// Returns true if `properties` has been explicitly set. + public var hasProperties: Bool {return _storage._properties != nil} + /// Clears the value of `properties`. Subsequent reads from it will return its default value. + public mutating func clearProperties() {_storage._properties = nil} + + public var additionalProperties: Discovery_V1_Schema { + get {return _storage._additionalProperties ?? Discovery_V1_Schema()} + set {_uniqueStorage()._additionalProperties = newValue} + } + /// Returns true if `additionalProperties` has been explicitly set. 
+ public var hasAdditionalProperties: Bool {return _storage._additionalProperties != nil} + /// Clears the value of `additionalProperties`. Subsequent reads from it will return its default value. + public mutating func clearAdditionalProperties() {_storage._additionalProperties = nil} + + public var items: Discovery_V1_Schema { + get {return _storage._items ?? Discovery_V1_Schema()} + set {_uniqueStorage()._items = newValue} + } + /// Returns true if `items` has been explicitly set. + public var hasItems: Bool {return _storage._items != nil} + /// Clears the value of `items`. Subsequent reads from it will return its default value. + public mutating func clearItems() {_storage._items = nil} + + public var ref: String { + get {return _storage._ref} + set {_uniqueStorage()._ref = newValue} + } + + public var annotations: Discovery_V1_Annotations { + get {return _storage._annotations ?? Discovery_V1_Annotations()} + set {_uniqueStorage()._annotations = newValue} + } + /// Returns true if `annotations` has been explicitly set. + public var hasAnnotations: Bool {return _storage._annotations != nil} + /// Clears the value of `annotations`. Subsequent reads from it will return its default value. + public mutating func clearAnnotations() {_storage._annotations = nil} + + public var readOnly: Bool { + get {return _storage._readOnly} + set {_uniqueStorage()._readOnly = newValue} + } + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._id) + case 2: try decoder.decodeSingularStringField(value: &_storage._type) + case 3: try decoder.decodeSingularStringField(value: &_storage._description_p) + case 4: try decoder.decodeSingularStringField(value: &_storage._default) + case 5: try decoder.decodeSingularBoolField(value: &_storage._required) + case 6: try decoder.decodeSingularStringField(value: &_storage._format) + case 7: try decoder.decodeSingularStringField(value: &_storage._pattern) + case 8: try decoder.decodeSingularStringField(value: &_storage._minimum) + case 9: try decoder.decodeSingularStringField(value: &_storage._maximum) + case 10: try decoder.decodeRepeatedStringField(value: &_storage._enum) + case 11: try decoder.decodeRepeatedStringField(value: &_storage._enumDescriptions) + case 12: try decoder.decodeSingularBoolField(value: &_storage._repeated) + case 13: try decoder.decodeSingularStringField(value: &_storage._location) + case 14: try decoder.decodeSingularMessageField(value: &_storage._properties) + case 15: try decoder.decodeSingularMessageField(value: &_storage._additionalProperties) + case 16: try decoder.decodeSingularMessageField(value: &_storage._items) + case 17: try decoder.decodeSingularStringField(value: &_storage._ref) + case 18: try decoder.decodeSingularMessageField(value: &_storage._annotations) + case 19: try decoder.decodeSingularBoolField(value: &_storage._readOnly) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf 
library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._id.isEmpty { + try visitor.visitSingularStringField(value: _storage._id, fieldNumber: 1) + } + if !_storage._type.isEmpty { + try visitor.visitSingularStringField(value: _storage._type, fieldNumber: 2) + } + if !_storage._description_p.isEmpty { + try visitor.visitSingularStringField(value: _storage._description_p, fieldNumber: 3) + } + if !_storage._default.isEmpty { + try visitor.visitSingularStringField(value: _storage._default, fieldNumber: 4) + } + if _storage._required != false { + try visitor.visitSingularBoolField(value: _storage._required, fieldNumber: 5) + } + if !_storage._format.isEmpty { + try visitor.visitSingularStringField(value: _storage._format, fieldNumber: 6) + } + if !_storage._pattern.isEmpty { + try visitor.visitSingularStringField(value: _storage._pattern, fieldNumber: 7) + } + if !_storage._minimum.isEmpty { + try visitor.visitSingularStringField(value: _storage._minimum, fieldNumber: 8) + } + if !_storage._maximum.isEmpty { + try visitor.visitSingularStringField(value: _storage._maximum, fieldNumber: 9) + } + if !_storage._enum.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enum, fieldNumber: 10) + } + if !_storage._enumDescriptions.isEmpty { + try visitor.visitRepeatedStringField(value: _storage._enumDescriptions, fieldNumber: 11) + } + if _storage._repeated != false { + try visitor.visitSingularBoolField(value: _storage._repeated, fieldNumber: 12) + } + if !_storage._location.isEmpty { + try visitor.visitSingularStringField(value: _storage._location, fieldNumber: 13) + } + if let v = _storage._properties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 14) + } + if let v = _storage._additionalProperties { + try visitor.visitSingularMessageField(value: v, fieldNumber: 15) + } + if let v = _storage._items { + try visitor.visitSingularMessageField(value: v, fieldNumber: 16) + } + if !_storage._ref.isEmpty { + try visitor.visitSingularStringField(value: _storage._ref, fieldNumber: 17) + } + if let v = _storage._annotations { + try visitor.visitSingularMessageField(value: v, fieldNumber: 18) + } + if _storage._readOnly != false { + try visitor.visitSingularBoolField(value: _storage._readOnly, fieldNumber: 19) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +public struct Discovery_V1_Schemas: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Schemas" + + public var additionalProperties: [Discovery_V1_NamedSchema] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. 
+ public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Scope: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Scope" + + public var description_p: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.description_p) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Scopes: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Scopes" + + public var additionalProperties: [Discovery_V1_NamedScope] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedMessageField(value: &self.additionalProperties) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.additionalProperties.isEmpty { + try visitor.visitRepeatedMessageField(value: self.additionalProperties, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_Simple: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Simple" + + public var multipart: Bool = false + + public var path: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularBoolField(value: &self.multipart) + case 2: try decoder.decodeSingularStringField(value: &self.path) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if self.multipart != false { + try visitor.visitSingularBoolField(value: self.multipart, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitSingularStringField(value: self.path, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +public struct Discovery_V1_StringArray: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".StringArray" + + public var value: [String] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.value.isEmpty { + try visitor.visitRepeatedStringField(value: self.value, fieldNumber: 1) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. 
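[Editorial aside, not part of the generated file or of this diff.] The structs above are plain generated SwiftProtobuf messages, so a consumer never calls decodeMessage or traverse directly; it goes through the serialization entry points the doc comments mention. The sketch below, which assumes the usual SwiftProtobuf 1.x API (serializedData(), init(serializedData:), jsonUTF8Data(), init(jsonUTF8Data:)) and uses made-up field values, shows a round-trip with one of the messages defined above. Property names (id, type, required, location) come from the generated Discovery_V1_Parameter struct; everything else here is illustrative.

// Illustrative sketch only: round-trips a Discovery_V1_Parameter through the
// binary and JSON codecs that the SwiftProtobuf runtime provides for every
// generated Message. Field values are hypothetical.
import Foundation
import SwiftProtobuf

var parameter = Discovery_V1_Parameter()
parameter.id = "maxResults"      // illustrative value; singular string field
parameter.type = "integer"
parameter.required = false
parameter.location = "query"

do {
    // Binary protobuf round-trip via Message.serializedData() / init(serializedData:).
    let binary: Data = try parameter.serializedData()
    let decoded = try Discovery_V1_Parameter(serializedData: binary)
    // Equality is backed by the generated _protobuf_generated_isEqualTo support below.
    assert(decoded == parameter)

    // JSON round-trip via Message.jsonUTF8Data() / init(jsonUTF8Data:).
    let json: Data = try parameter.jsonUTF8Data()
    _ = try Discovery_V1_Parameter(jsonUTF8Data: json)
} catch {
    print("round-trip failed: \(error)")
}

The runtime-support extensions that follow (name maps, _StorageClass copy-on-write helpers, and the generated isEqualTo implementations) are what make the calls above work; they are not intended to be used directly.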
+ +fileprivate let _protobuf_package = "discovery.v1" + +extension Discovery_V1_Annotations: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "required"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Annotations) -> Bool { + if self.required != other.required {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Any: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + 2: .same(proto: "yaml"), + ] + + fileprivate class _StorageClass { + var _value: SwiftProtobuf.Google_Protobuf_Any? = nil + var _yaml: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _value = source._value + _yaml = source._yaml + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Any) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._value != other_storage._value {return false} + if _storage._yaml != other_storage._yaml {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Auth: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "oauth2"), + ] + + fileprivate class _StorageClass { + var _oauth2: Discovery_V1_Oauth2? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _oauth2 = source._oauth2 + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Auth) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._oauth2 != other_storage._oauth2 {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Document: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "kind"), + 2: .standard(proto: "discovery_version"), + 3: .same(proto: "id"), + 4: .same(proto: "name"), + 5: .same(proto: "version"), + 6: .same(proto: "revision"), + 7: .same(proto: "title"), + 8: .same(proto: "description"), + 9: .same(proto: "icons"), + 10: .standard(proto: "documentation_link"), + 11: .same(proto: "labels"), + 12: .same(proto: "protocol"), + 13: .standard(proto: "base_url"), + 14: .standard(proto: "base_path"), + 15: .standard(proto: "root_url"), + 16: .standard(proto: "service_path"), + 17: .standard(proto: "batch_path"), + 18: .same(proto: "parameters"), + 19: .same(proto: "auth"), + 20: .same(proto: "features"), + 21: .same(proto: "schemas"), + 22: .same(proto: "methods"), + 23: .same(proto: "resources"), + 24: .same(proto: "etag"), + 25: .standard(proto: "owner_domain"), + 26: .standard(proto: "owner_name"), + 27: .standard(proto: "version_module"), + 28: .standard(proto: "canonical_name"), + 29: .standard(proto: "fully_encode_reserved_expansion"), + 30: .standard(proto: "package_path"), + ] + + fileprivate class _StorageClass { + var _kind: String = String() + var _discoveryVersion: String = String() + var _id: String = String() + var _name: String = String() + var _version: String = String() + var _revision: String = String() + var _title: String = String() + var _description_p: String = String() + var _icons: Discovery_V1_Icons? = nil + var _documentationLink: String = String() + var _labels: [String] = [] + var _protocol: String = String() + var _baseURL: String = String() + var _basePath: String = String() + var _rootURL: String = String() + var _servicePath: String = String() + var _batchPath: String = String() + var _parameters: Discovery_V1_Parameters? = nil + var _auth: Discovery_V1_Auth? = nil + var _features: [String] = [] + var _schemas: Discovery_V1_Schemas? = nil + var _methods: Discovery_V1_Methods? = nil + var _resources: Discovery_V1_Resources? 
= nil + var _etag: String = String() + var _ownerDomain: String = String() + var _ownerName: String = String() + var _versionModule: Bool = false + var _canonicalName: String = String() + var _fullyEncodeReservedExpansion: Bool = false + var _packagePath: String = String() + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _kind = source._kind + _discoveryVersion = source._discoveryVersion + _id = source._id + _name = source._name + _version = source._version + _revision = source._revision + _title = source._title + _description_p = source._description_p + _icons = source._icons + _documentationLink = source._documentationLink + _labels = source._labels + _protocol = source._protocol + _baseURL = source._baseURL + _basePath = source._basePath + _rootURL = source._rootURL + _servicePath = source._servicePath + _batchPath = source._batchPath + _parameters = source._parameters + _auth = source._auth + _features = source._features + _schemas = source._schemas + _methods = source._methods + _resources = source._resources + _etag = source._etag + _ownerDomain = source._ownerDomain + _ownerName = source._ownerName + _versionModule = source._versionModule + _canonicalName = source._canonicalName + _fullyEncodeReservedExpansion = source._fullyEncodeReservedExpansion + _packagePath = source._packagePath + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Document) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._kind != other_storage._kind {return false} + if _storage._discoveryVersion != other_storage._discoveryVersion {return false} + if _storage._id != other_storage._id {return false} + if _storage._name != other_storage._name {return false} + if _storage._version != other_storage._version {return false} + if _storage._revision != other_storage._revision {return false} + if _storage._title != other_storage._title {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._icons != other_storage._icons {return false} + if _storage._documentationLink != other_storage._documentationLink {return false} + if _storage._labels != other_storage._labels {return false} + if _storage._protocol != other_storage._protocol {return false} + if _storage._baseURL != other_storage._baseURL {return false} + if _storage._basePath != other_storage._basePath {return false} + if _storage._rootURL != other_storage._rootURL {return false} + if _storage._servicePath != other_storage._servicePath {return false} + if _storage._batchPath != other_storage._batchPath {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._auth != other_storage._auth {return false} + if _storage._features != other_storage._features {return false} + if _storage._schemas != other_storage._schemas {return false} + if _storage._methods != other_storage._methods {return false} + if _storage._resources != other_storage._resources {return false} + if _storage._etag != other_storage._etag {return false} + if _storage._ownerDomain != other_storage._ownerDomain {return false} + if _storage._ownerName != other_storage._ownerName {return false} + if _storage._versionModule != 
other_storage._versionModule {return false} + if _storage._canonicalName != other_storage._canonicalName {return false} + if _storage._fullyEncodeReservedExpansion != other_storage._fullyEncodeReservedExpansion {return false} + if _storage._packagePath != other_storage._packagePath {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Icons: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "x16"), + 2: .same(proto: "x32"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Icons) -> Bool { + if self.x16 != other.x16 {return false} + if self.x32 != other.x32 {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_MediaUpload: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "accept"), + 2: .standard(proto: "max_size"), + 3: .same(proto: "protocols"), + 4: .standard(proto: "supports_subscription"), + ] + + fileprivate class _StorageClass { + var _accept: [String] = [] + var _maxSize: String = String() + var _protocols: Discovery_V1_Protocols? = nil + var _supportsSubscription: Bool = false + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _accept = source._accept + _maxSize = source._maxSize + _protocols = source._protocols + _supportsSubscription = source._supportsSubscription + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_MediaUpload) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._accept != other_storage._accept {return false} + if _storage._maxSize != other_storage._maxSize {return false} + if _storage._protocols != other_storage._protocols {return false} + if _storage._supportsSubscription != other_storage._supportsSubscription {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Method: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "id"), + 2: .same(proto: "path"), + 3: .standard(proto: "http_method"), + 4: .same(proto: "description"), + 5: .same(proto: "parameters"), + 6: .standard(proto: "parameter_order"), + 7: .same(proto: "request"), + 8: .same(proto: "response"), + 9: .same(proto: "scopes"), + 10: .standard(proto: "supports_media_download"), + 11: .standard(proto: "supports_media_upload"), + 12: .standard(proto: "use_media_download_service"), + 13: .standard(proto: "media_upload"), + 14: .standard(proto: "supports_subscription"), + 15: .standard(proto: "flat_path"), + 16: .standard(proto: "etag_required"), + ] + + fileprivate class _StorageClass { + var _id: String = String() + var _path: String = String() + var _httpMethod: String = String() + var _description_p: String = String() + var _parameters: Discovery_V1_Parameters? 
= nil + var _parameterOrder: [String] = [] + var _request: Discovery_V1_Request? = nil + var _response: Discovery_V1_Response? = nil + var _scopes: [String] = [] + var _supportsMediaDownload: Bool = false + var _supportsMediaUpload: Bool = false + var _useMediaDownloadService: Bool = false + var _mediaUpload: Discovery_V1_MediaUpload? = nil + var _supportsSubscription: Bool = false + var _flatPath: String = String() + var _etagRequired: Bool = false + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _id = source._id + _path = source._path + _httpMethod = source._httpMethod + _description_p = source._description_p + _parameters = source._parameters + _parameterOrder = source._parameterOrder + _request = source._request + _response = source._response + _scopes = source._scopes + _supportsMediaDownload = source._supportsMediaDownload + _supportsMediaUpload = source._supportsMediaUpload + _useMediaDownloadService = source._useMediaDownloadService + _mediaUpload = source._mediaUpload + _supportsSubscription = source._supportsSubscription + _flatPath = source._flatPath + _etagRequired = source._etagRequired + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Method) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._id != other_storage._id {return false} + if _storage._path != other_storage._path {return false} + if _storage._httpMethod != other_storage._httpMethod {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._parameterOrder != other_storage._parameterOrder {return false} + if _storage._request != other_storage._request {return false} + if _storage._response != other_storage._response {return false} + if _storage._scopes != other_storage._scopes {return false} + if _storage._supportsMediaDownload != other_storage._supportsMediaDownload {return false} + if _storage._supportsMediaUpload != other_storage._supportsMediaUpload {return false} + if _storage._useMediaDownloadService != other_storage._useMediaDownloadService {return false} + if _storage._mediaUpload != other_storage._mediaUpload {return false} + if _storage._supportsSubscription != other_storage._supportsSubscription {return false} + if _storage._flatPath != other_storage._flatPath {return false} + if _storage._etagRequired != other_storage._etagRequired {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Methods: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Methods) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedMethod: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: 
SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Method? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedMethod) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedParameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Parameter? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedParameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedResource: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Resource? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedResource) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedSchema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Schema? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedSchema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_NamedScope: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + fileprivate class _StorageClass { + var _name: String = String() + var _value: Discovery_V1_Scope? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _name = source._name + _value = source._value + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_NamedScope) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._name != other_storage._name {return false} + if _storage._value != other_storage._value {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Oauth2: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "scopes"), + ] + + fileprivate class _StorageClass { + var _scopes: Discovery_V1_Scopes? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _scopes = source._scopes + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Oauth2) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._scopes != other_storage._scopes {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "id"), + 2: .same(proto: "type"), + 3: .standard(proto: "_ref"), + 4: .same(proto: "description"), + 5: .same(proto: "default"), + 6: .same(proto: "required"), + 7: .same(proto: "format"), + 8: .same(proto: "pattern"), + 9: .same(proto: "minimum"), + 10: .same(proto: "maximum"), + 11: .same(proto: "enum"), + 12: .standard(proto: "enum_descriptions"), + 13: .same(proto: "repeated"), + 14: .same(proto: "location"), + 15: .same(proto: "properties"), + 16: .standard(proto: "additional_properties"), + 17: .same(proto: "items"), + 18: .same(proto: "annotations"), + ] + + fileprivate class _StorageClass { + var _id: String = String() + var _type: String = String() + var _ref: String = String() + var _description_p: String = String() + var _default: String = String() + var _required: Bool = false + var _format: String = String() + var _pattern: String = String() + var _minimum: String = String() + var _maximum: String = String() + var _enum: [String] = [] + var _enumDescriptions: [String] = [] + var _repeated: Bool = false + var _location: String = String() + var _properties: Discovery_V1_Schemas? = nil + var _additionalProperties: Discovery_V1_Schema? = nil + var _items: Discovery_V1_Schema? = nil + var _annotations: Discovery_V1_Annotations? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _id = source._id + _type = source._type + _ref = source._ref + _description_p = source._description_p + _default = source._default + _required = source._required + _format = source._format + _pattern = source._pattern + _minimum = source._minimum + _maximum = source._maximum + _enum = source._enum + _enumDescriptions = source._enumDescriptions + _repeated = source._repeated + _location = source._location + _properties = source._properties + _additionalProperties = source._additionalProperties + _items = source._items + _annotations = source._annotations + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Parameter) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._id != other_storage._id {return false} + if _storage._type != other_storage._type {return false} + if _storage._ref != other_storage._ref {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._required != other_storage._required {return false} + if _storage._format != other_storage._format {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._enumDescriptions != other_storage._enumDescriptions {return false} + if _storage._repeated != other_storage._repeated {return false} + if _storage._location != other_storage._location {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._items != other_storage._items {return false} + if _storage._annotations != other_storage._annotations {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Parameters: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Parameters) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Protocols: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "simple"), + 2: .same(proto: "resumable"), + ] + + fileprivate class _StorageClass { + var _simple: Discovery_V1_Simple? = nil + var _resumable: Discovery_V1_Resumable? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _simple = source._simple + _resumable = source._resumable + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Protocols) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._simple != other_storage._simple {return false} + if _storage._resumable != other_storage._resumable {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Request: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + 2: .standard(proto: "parameter_name"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Request) -> Bool { + if self.ref != other.ref {return false} + if self.parameterName != other.parameterName {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Resource: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "methods"), + 2: .same(proto: "resources"), + ] + + fileprivate class _StorageClass { + var _methods: Discovery_V1_Methods? = nil + var _resources: Discovery_V1_Resources? = nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _methods = source._methods + _resources = source._resources + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Resource) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._methods != other_storage._methods {return false} + if _storage._resources != other_storage._resources {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Resources: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Resources) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "_ref"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Response) -> Bool { + if self.ref != other.ref {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension 
Discovery_V1_Resumable: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "multipart"), + 2: .same(proto: "path"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Resumable) -> Bool { + if self.multipart != other.multipart {return false} + if self.path != other.path {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Schema: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "id"), + 2: .same(proto: "type"), + 3: .same(proto: "description"), + 4: .same(proto: "default"), + 5: .same(proto: "required"), + 6: .same(proto: "format"), + 7: .same(proto: "pattern"), + 8: .same(proto: "minimum"), + 9: .same(proto: "maximum"), + 10: .same(proto: "enum"), + 11: .standard(proto: "enum_descriptions"), + 12: .same(proto: "repeated"), + 13: .same(proto: "location"), + 14: .same(proto: "properties"), + 15: .standard(proto: "additional_properties"), + 16: .same(proto: "items"), + 17: .standard(proto: "_ref"), + 18: .same(proto: "annotations"), + 19: .standard(proto: "read_only"), + ] + + fileprivate class _StorageClass { + var _id: String = String() + var _type: String = String() + var _description_p: String = String() + var _default: String = String() + var _required: Bool = false + var _format: String = String() + var _pattern: String = String() + var _minimum: String = String() + var _maximum: String = String() + var _enum: [String] = [] + var _enumDescriptions: [String] = [] + var _repeated: Bool = false + var _location: String = String() + var _properties: Discovery_V1_Schemas? = nil + var _additionalProperties: Discovery_V1_Schema? = nil + var _items: Discovery_V1_Schema? = nil + var _ref: String = String() + var _annotations: Discovery_V1_Annotations? 
= nil + var _readOnly: Bool = false + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _id = source._id + _type = source._type + _description_p = source._description_p + _default = source._default + _required = source._required + _format = source._format + _pattern = source._pattern + _minimum = source._minimum + _maximum = source._maximum + _enum = source._enum + _enumDescriptions = source._enumDescriptions + _repeated = source._repeated + _location = source._location + _properties = source._properties + _additionalProperties = source._additionalProperties + _items = source._items + _ref = source._ref + _annotations = source._annotations + _readOnly = source._readOnly + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Schema) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._id != other_storage._id {return false} + if _storage._type != other_storage._type {return false} + if _storage._description_p != other_storage._description_p {return false} + if _storage._default != other_storage._default {return false} + if _storage._required != other_storage._required {return false} + if _storage._format != other_storage._format {return false} + if _storage._pattern != other_storage._pattern {return false} + if _storage._minimum != other_storage._minimum {return false} + if _storage._maximum != other_storage._maximum {return false} + if _storage._enum != other_storage._enum {return false} + if _storage._enumDescriptions != other_storage._enumDescriptions {return false} + if _storage._repeated != other_storage._repeated {return false} + if _storage._location != other_storage._location {return false} + if _storage._properties != other_storage._properties {return false} + if _storage._additionalProperties != other_storage._additionalProperties {return false} + if _storage._items != other_storage._items {return false} + if _storage._ref != other_storage._ref {return false} + if _storage._annotations != other_storage._annotations {return false} + if _storage._readOnly != other_storage._readOnly {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Schemas: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Schemas) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Scope: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "description"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Scope) -> Bool { + if self.description_p != other.description_p {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Scopes: SwiftProtobuf._MessageImplementationBase, 
SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "additional_properties"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Scopes) -> Bool { + if self.additionalProperties != other.additionalProperties {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_Simple: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "multipart"), + 2: .same(proto: "path"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_Simple) -> Bool { + if self.multipart != other.multipart {return false} + if self.path != other.path {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Discovery_V1_StringArray: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Discovery_V1_StringArray) -> Bool { + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/plugin.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/plugin.pb.swift new file mode 100644 index 000000000..9e19a082f --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/plugin.pb.swift @@ -0,0 +1,499 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/plugins/plugin.proto +// +// For information on using the generated types, please see the documentation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gnostic can be extended with plugins. +// A plugin is just a program that reads a Request from stdin +// and writes a Response to stdout. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "gnostic_$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to gnostic. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that you are building against the same version of the API +// that was used to generate this file.
+fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +/// The version number of gnostic. +public struct Gnostic_Plugin_V1_Version: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Version" + + public var major: Int32 = 0 + + public var minor: Int32 = 0 + + public var patch: Int32 = 0 + + /// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + /// be empty for mainline stable releases. + public var suffix: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularInt32Field(value: &self.major) + case 2: try decoder.decodeSingularInt32Field(value: &self.minor) + case 3: try decoder.decodeSingularInt32Field(value: &self.patch) + case 4: try decoder.decodeSingularStringField(value: &self.suffix) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if self.major != 0 { + try visitor.visitSingularInt32Field(value: self.major, fieldNumber: 1) + } + if self.minor != 0 { + try visitor.visitSingularInt32Field(value: self.minor, fieldNumber: 2) + } + if self.patch != 0 { + try visitor.visitSingularInt32Field(value: self.patch, fieldNumber: 3) + } + if !self.suffix.isEmpty { + try visitor.visitSingularStringField(value: self.suffix, fieldNumber: 4) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// A parameter passed to the plugin from (or through) gnostic. +public struct Gnostic_Plugin_V1_Parameter: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Parameter" + + /// The name of the parameter as specified in the option string + public var name: String = String() + + /// The parameter value as specified in the option string + public var value: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.value) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. 
`Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.value.isEmpty { + try visitor.visitSingularStringField(value: self.value, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// An encoded Request is written to the plugin's stdin. +public struct Gnostic_Plugin_V1_Request: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Request" + + /// filename or URL of the original source document + public var sourceName: String { + get {return _storage._sourceName} + set {_uniqueStorage()._sourceName = newValue} + } + + /// Output path specified in the plugin invocation. + public var outputPath: String { + get {return _storage._outputPath} + set {_uniqueStorage()._outputPath = newValue} + } + + /// Plugin parameters parsed from the invocation string. + public var parameters: [Gnostic_Plugin_V1_Parameter] { + get {return _storage._parameters} + set {_uniqueStorage()._parameters = newValue} + } + + /// The version number of gnostic. + public var compilerVersion: Gnostic_Plugin_V1_Version { + get {return _storage._compilerVersion ?? Gnostic_Plugin_V1_Version()} + set {_uniqueStorage()._compilerVersion = newValue} + } + /// Returns true if `compilerVersion` has been explicitly set. + public var hasCompilerVersion: Bool {return _storage._compilerVersion != nil} + /// Clears the value of `compilerVersion`. Subsequent reads from it will return its default value. + public mutating func clearCompilerVersion() {_storage._compilerVersion = nil} + + /// OpenAPI v2 API representation + public var openapi2: Openapi_V2_Document { + get {return _storage._openapi2 ?? Openapi_V2_Document()} + set {_uniqueStorage()._openapi2 = newValue} + } + /// Returns true if `openapi2` has been explicitly set. + public var hasOpenapi2: Bool {return _storage._openapi2 != nil} + /// Clears the value of `openapi2`. Subsequent reads from it will return its default value. + public mutating func clearOpenapi2() {_storage._openapi2 = nil} + + /// OpenAPI v3 API representation + public var openapi3: Openapi_V3_Document { + get {return _storage._openapi3 ?? Openapi_V3_Document()} + set {_uniqueStorage()._openapi3 = newValue} + } + /// Returns true if `openapi3` has been explicitly set. + public var hasOpenapi3: Bool {return _storage._openapi3 != nil} + /// Clears the value of `openapi3`. Subsequent reads from it will return its default value. + public mutating func clearOpenapi3() {_storage._openapi3 = nil} + + /// Discovery API representation + public var discovery: Discovery_V1_Document { + get {return _storage._discovery ?? Discovery_V1_Document()} + set {_uniqueStorage()._discovery = newValue} + } + /// Returns true if `discovery` has been explicitly set. + public var hasDiscovery: Bool {return _storage._discovery != nil} + /// Clears the value of `discovery`. Subsequent reads from it will return its default value. + public mutating func clearDiscovery() {_storage._discovery = nil} + + /// generated code surface representation + public var surface: Surface_V1_Model { + get {return _storage._surface ?? Surface_V1_Model()} + set {_uniqueStorage()._surface = newValue} + } + /// Returns true if `surface` has been explicitly set. 
+ public var hasSurface: Bool {return _storage._surface != nil} + /// Clears the value of `surface`. Subsequent reads from it will return its default value. + public mutating func clearSurface() {_storage._surface = nil} + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + _ = _uniqueStorage() + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &_storage._sourceName) + case 2: try decoder.decodeSingularStringField(value: &_storage._outputPath) + case 3: try decoder.decodeRepeatedMessageField(value: &_storage._parameters) + case 4: try decoder.decodeSingularMessageField(value: &_storage._compilerVersion) + case 5: try decoder.decodeSingularMessageField(value: &_storage._openapi2) + case 6: try decoder.decodeSingularMessageField(value: &_storage._openapi3) + case 7: try decoder.decodeSingularMessageField(value: &_storage._discovery) + case 8: try decoder.decodeSingularMessageField(value: &_storage._surface) + default: break + } + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + try withExtendedLifetime(_storage) { (_storage: _StorageClass) in + if !_storage._sourceName.isEmpty { + try visitor.visitSingularStringField(value: _storage._sourceName, fieldNumber: 1) + } + if !_storage._outputPath.isEmpty { + try visitor.visitSingularStringField(value: _storage._outputPath, fieldNumber: 2) + } + if !_storage._parameters.isEmpty { + try visitor.visitRepeatedMessageField(value: _storage._parameters, fieldNumber: 3) + } + if let v = _storage._compilerVersion { + try visitor.visitSingularMessageField(value: v, fieldNumber: 4) + } + if let v = _storage._openapi2 { + try visitor.visitSingularMessageField(value: v, fieldNumber: 5) + } + if let v = _storage._openapi3 { + try visitor.visitSingularMessageField(value: v, fieldNumber: 6) + } + if let v = _storage._discovery { + try visitor.visitSingularMessageField(value: v, fieldNumber: 7) + } + if let v = _storage._surface { + try visitor.visitSingularMessageField(value: v, fieldNumber: 8) + } + } + try unknownFields.traverse(visitor: &visitor) + } + + fileprivate var _storage = _StorageClass.defaultInstance +} + +/// The plugin writes an encoded Response to stdout. +public struct Gnostic_Plugin_V1_Response: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Response" + + /// Error message. If non-empty, the plugin failed. + /// The plugin process should exit with status code zero + /// even if it reports an error in this way. + /// + /// This should be used to indicate errors which prevent the plugin from + /// operating as intended. 
Errors which indicate a problem in openapic + /// itself -- such as the input Document being unparseable -- should be + /// reported by writing a message to stderr and exiting with a non-zero + /// status code. + public var errors: [String] = [] + + /// file output, each file will be written by openapic to an appropriate location. + public var files: [Gnostic_Plugin_V1_File] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeRepeatedStringField(value: &self.errors) + case 2: try decoder.decodeRepeatedMessageField(value: &self.files) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.errors.isEmpty { + try visitor.visitRepeatedStringField(value: self.errors, fieldNumber: 1) + } + if !self.files.isEmpty { + try visitor.visitRepeatedMessageField(value: self.files, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// File describes a file generated by a plugin. +public struct Gnostic_Plugin_V1_File: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".File" + + /// name of the file + public var name: String = String() + + /// data to be written to the file + public var data: Data = SwiftProtobuf.Internal.emptyData + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularBytesField(value: &self.data) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.data.isEmpty { + try visitor.visitSingularBytesField(value: self.data, fieldNumber: 2) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. 
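The four messages above are the whole plugin contract: gnostic writes a binary-encoded Gnostic_Plugin_V1_Request to the plugin's stdin and reads a binary-encoded Gnostic_Plugin_V1_Response back from its stdout. The following sketch of a plugin entry point is illustrative only, not part of the vendored file; it assumes the Stdin/Stdout helpers defined in io.swift further below, and the file name it emits is invented for the example.

// A hypothetical plugin main, sketched against the generated types above.
func runPlugin() throws {
  // gnostic pipes a serialized Request to the plugin's stdin.
  let request = try Gnostic_Plugin_V1_Request(serializedData: Stdin.readall())

  var response = Gnostic_Plugin_V1_Response()
  if request.hasOpenapi2 || request.hasOpenapi3 || request.hasDiscovery {
    // Emit one generated file for gnostic to write out.
    var file = Gnostic_Plugin_V1_File()
    file.name = "summary.txt"  // hypothetical output name
    file.data = Data("source: \(request.sourceName)\n".utf8)
    response.files.append(file)
  } else {
    // Plugin-level problems go into `errors`; the process still exits with status 0.
    response.errors.append("request contained no API representation")
  }

  // The serialized Response goes back to gnostic on stdout.
  Stdout.write(bytes: try response.serializedData())
}

As the header comment notes, such an executable would be named "gnostic_$NAME", placed somewhere on the path, and invoked by gnostic when the matching "--${NAME}_out" flag is passed.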
+ +fileprivate let _protobuf_package = "gnostic.plugin.v1" + +extension Gnostic_Plugin_V1_Version: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "major"), + 2: .same(proto: "minor"), + 3: .same(proto: "patch"), + 4: .same(proto: "suffix"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Version) -> Bool { + if self.major != other.major {return false} + if self.minor != other.minor {return false} + if self.patch != other.patch {return false} + if self.suffix != other.suffix {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_Parameter: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "value"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Parameter) -> Bool { + if self.name != other.name {return false} + if self.value != other.value {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_Request: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .standard(proto: "source_name"), + 2: .standard(proto: "output_path"), + 3: .same(proto: "parameters"), + 4: .standard(proto: "compiler_version"), + 5: .same(proto: "openapi2"), + 6: .same(proto: "openapi3"), + 7: .same(proto: "discovery"), + 8: .same(proto: "surface"), + ] + + fileprivate class _StorageClass { + var _sourceName: String = String() + var _outputPath: String = String() + var _parameters: [Gnostic_Plugin_V1_Parameter] = [] + var _compilerVersion: Gnostic_Plugin_V1_Version? = nil + var _openapi2: Openapi_V2_Document? = nil + var _openapi3: Openapi_V3_Document? = nil + var _discovery: Discovery_V1_Document? = nil + var _surface: Surface_V1_Model? 
= nil + + static let defaultInstance = _StorageClass() + + private init() {} + + init(copying source: _StorageClass) { + _sourceName = source._sourceName + _outputPath = source._outputPath + _parameters = source._parameters + _compilerVersion = source._compilerVersion + _openapi2 = source._openapi2 + _openapi3 = source._openapi3 + _discovery = source._discovery + _surface = source._surface + } + } + + fileprivate mutating func _uniqueStorage() -> _StorageClass { + if !isKnownUniquelyReferenced(&_storage) { + _storage = _StorageClass(copying: _storage) + } + return _storage + } + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Request) -> Bool { + if _storage !== other._storage { + let storagesAreEqual: Bool = withExtendedLifetime((_storage, other._storage)) { (_storage, other_storage) in + if _storage._sourceName != other_storage._sourceName {return false} + if _storage._outputPath != other_storage._outputPath {return false} + if _storage._parameters != other_storage._parameters {return false} + if _storage._compilerVersion != other_storage._compilerVersion {return false} + if _storage._openapi2 != other_storage._openapi2 {return false} + if _storage._openapi3 != other_storage._openapi3 {return false} + if _storage._discovery != other_storage._discovery {return false} + if _storage._surface != other_storage._surface {return false} + return true + } + if !storagesAreEqual {return false} + } + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_Response: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "errors"), + 2: .same(proto: "files"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_Response) -> Bool { + if self.errors != other.errors {return false} + if self.files != other.files {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Gnostic_Plugin_V1_File: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "data"), + ] + + public func _protobuf_generated_isEqualTo(other: Gnostic_Plugin_V1_File) -> Bool { + if self.name != other.name {return false} + if self.data != other.data {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/surface.pb.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/surface.pb.swift new file mode 100644 index 000000000..f61b8e28c --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/Gnostic/surface.pb.swift @@ -0,0 +1,579 @@ +// DO NOT EDIT. +// +// Generated by the Swift generator plugin for the protocol buffer compiler. +// Source: github.com/googleapis/gnostic/surface/surface.proto +// +// For information on using the generated types, please see the documenation: +// https://github.com/apple/swift-protobuf/ + +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Model an API surface for code generation. + +import Foundation +import SwiftProtobuf + +// If the compiler emits an error on this type, it is because this file +// was generated by a version of the `protoc` Swift plug-in that is +// incompatible with the version of SwiftProtobuf to which you are linking. +// Please ensure that your are building against the same version of the API +// that was used to generate this file. +fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { + struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} + typealias Version = _2 +} + +public enum Surface_V1_FieldKind: SwiftProtobuf.Enum { + public typealias RawValue = Int + case scalar // = 0 + case map // = 1 + case array // = 2 + case reference // = 3 + case UNRECOGNIZED(Int) + + public init() { + self = .scalar + } + + public init?(rawValue: Int) { + switch rawValue { + case 0: self = .scalar + case 1: self = .map + case 2: self = .array + case 3: self = .reference + default: self = .UNRECOGNIZED(rawValue) + } + } + + public var rawValue: Int { + switch self { + case .scalar: return 0 + case .map: return 1 + case .array: return 2 + case .reference: return 3 + case .UNRECOGNIZED(let i): return i + } + } + +} + +public enum Surface_V1_TypeKind: SwiftProtobuf.Enum { + public typealias RawValue = Int + + /// implement with named fields + case `struct` // = 0 + + /// implement with a map + case object // = 1 + case UNRECOGNIZED(Int) + + public init() { + self = .struct + } + + public init?(rawValue: Int) { + switch rawValue { + case 0: self = .struct + case 1: self = .object + default: self = .UNRECOGNIZED(rawValue) + } + } + + public var rawValue: Int { + switch self { + case .struct: return 0 + case .object: return 1 + case .UNRECOGNIZED(let i): return i + } + } + +} + +public enum Surface_V1_Position: SwiftProtobuf.Enum { + public typealias RawValue = Int + case body // = 0 + case header // = 1 + case formdata // = 2 + case query // = 3 + case path // = 4 + case UNRECOGNIZED(Int) + + public init() { + self = .body + } + + public init?(rawValue: Int) { + switch rawValue { + case 0: self = .body + case 1: self = .header + case 2: self = .formdata + case 3: self = .query + case 4: self = .path + default: self = .UNRECOGNIZED(rawValue) + } + } + + public var rawValue: Int { + switch self { + case .body: return 0 + case .header: return 1 + case .formdata: return 2 + case .query: return 3 + case .path: return 4 + case .UNRECOGNIZED(let i): return i + } + } + +} + +/// Field is a field in a definition and can be associated with +/// a position in a request structure. +public struct Surface_V1_Field: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Field" + + /// the name as specified in the API description + public var name: String = String() + + /// the specified content type of the field + public var type: String = String() + + /// what kind of thing is this field? 
scalar, reference, array, map of strings to the specified type + public var kind: Surface_V1_FieldKind = .scalar + + /// the specified format of the field + public var format: String = String() + + /// "body", "header", "formdata", "query", or "path" + public var position: Surface_V1_Position = .body + + /// the programming-language native type of the field + public var nativeType: String = String() + + /// the name to use for a data structure field + public var fieldName: String = String() + + /// the name to use for a function parameter + public var parameterName: String = String() + + /// true if this field should be serialized (to JSON, etc) + public var serialize: Bool = false + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularStringField(value: &self.type) + case 3: try decoder.decodeSingularEnumField(value: &self.kind) + case 4: try decoder.decodeSingularStringField(value: &self.format) + case 5: try decoder.decodeSingularEnumField(value: &self.position) + case 6: try decoder.decodeSingularStringField(value: &self.nativeType) + case 7: try decoder.decodeSingularStringField(value: &self.fieldName) + case 8: try decoder.decodeSingularStringField(value: &self.parameterName) + case 9: try decoder.decodeSingularBoolField(value: &self.serialize) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.type.isEmpty { + try visitor.visitSingularStringField(value: self.type, fieldNumber: 2) + } + if self.kind != .scalar { + try visitor.visitSingularEnumField(value: self.kind, fieldNumber: 3) + } + if !self.format.isEmpty { + try visitor.visitSingularStringField(value: self.format, fieldNumber: 4) + } + if self.position != .body { + try visitor.visitSingularEnumField(value: self.position, fieldNumber: 5) + } + if !self.nativeType.isEmpty { + try visitor.visitSingularStringField(value: self.nativeType, fieldNumber: 6) + } + if !self.fieldName.isEmpty { + try visitor.visitSingularStringField(value: self.fieldName, fieldNumber: 7) + } + if !self.parameterName.isEmpty { + try visitor.visitSingularStringField(value: self.parameterName, fieldNumber: 8) + } + if self.serialize != false { + try visitor.visitSingularBoolField(value: self.serialize, fieldNumber: 9) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Type typically corresponds to a definition, parameter, or response +/// in an API and is represented by a type in generated code. 
+public struct Surface_V1_Type: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Type" + + /// the name to use for the type + public var name: String = String() + + /// a meta-description of the type (struct, map, etc) + public var kind: Surface_V1_TypeKind = .struct + + /// a comment describing the type + public var description_p: String = String() + + /// if the type is a map, this is its content type + public var contentType: String = String() + + /// the fields of the type + public var fields: [Surface_V1_Field] = [] + + /// language-specific type name + public var typeName: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeSingularEnumField(value: &self.kind) + case 3: try decoder.decodeSingularStringField(value: &self.description_p) + case 4: try decoder.decodeSingularStringField(value: &self.contentType) + case 5: try decoder.decodeRepeatedMessageField(value: &self.fields) + case 6: try decoder.decodeSingularStringField(value: &self.typeName) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if self.kind != .struct { + try visitor.visitSingularEnumField(value: self.kind, fieldNumber: 2) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 3) + } + if !self.contentType.isEmpty { + try visitor.visitSingularStringField(value: self.contentType, fieldNumber: 4) + } + if !self.fields.isEmpty { + try visitor.visitRepeatedMessageField(value: self.fields, fieldNumber: 5) + } + if !self.typeName.isEmpty { + try visitor.visitSingularStringField(value: self.typeName, fieldNumber: 6) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Method is an operation of an API and typically has associated client and server code. 
+public struct Surface_V1_Method: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Method" + + /// Operation ID + public var operation: String = String() + + /// HTTP path + public var path: String = String() + + /// HTTP method name + public var method: String = String() + + /// description of method + public var description_p: String = String() + + /// Operation name, possibly generated from method and path + public var name: String = String() + + /// name of the generated handler + public var handlerName: String = String() + + /// name of the processing function in the service interface + public var processorName: String = String() + + /// name of client + public var clientName: String = String() + + /// parameters (input), with fields corresponding to input parameters + public var parametersTypeName: String = String() + + /// responses (output), with fields corresponding to possible response values + public var responsesTypeName: String = String() + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.operation) + case 2: try decoder.decodeSingularStringField(value: &self.path) + case 3: try decoder.decodeSingularStringField(value: &self.method) + case 4: try decoder.decodeSingularStringField(value: &self.description_p) + case 5: try decoder.decodeSingularStringField(value: &self.name) + case 6: try decoder.decodeSingularStringField(value: &self.handlerName) + case 7: try decoder.decodeSingularStringField(value: &self.processorName) + case 8: try decoder.decodeSingularStringField(value: &self.clientName) + case 9: try decoder.decodeSingularStringField(value: &self.parametersTypeName) + case 10: try decoder.decodeSingularStringField(value: &self.responsesTypeName) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. 
+ public func traverse(visitor: inout V) throws { + if !self.operation.isEmpty { + try visitor.visitSingularStringField(value: self.operation, fieldNumber: 1) + } + if !self.path.isEmpty { + try visitor.visitSingularStringField(value: self.path, fieldNumber: 2) + } + if !self.method.isEmpty { + try visitor.visitSingularStringField(value: self.method, fieldNumber: 3) + } + if !self.description_p.isEmpty { + try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 4) + } + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 5) + } + if !self.handlerName.isEmpty { + try visitor.visitSingularStringField(value: self.handlerName, fieldNumber: 6) + } + if !self.processorName.isEmpty { + try visitor.visitSingularStringField(value: self.processorName, fieldNumber: 7) + } + if !self.clientName.isEmpty { + try visitor.visitSingularStringField(value: self.clientName, fieldNumber: 8) + } + if !self.parametersTypeName.isEmpty { + try visitor.visitSingularStringField(value: self.parametersTypeName, fieldNumber: 9) + } + if !self.responsesTypeName.isEmpty { + try visitor.visitSingularStringField(value: self.responsesTypeName, fieldNumber: 10) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +/// Model represents an API for code generation. +public struct Surface_V1_Model: SwiftProtobuf.Message { + public static let protoMessageName: String = _protobuf_package + ".Model" + + /// a free-form title for the API + public var name: String = String() + + /// the types used by the API + public var types: [Surface_V1_Type] = [] + + /// the methods (functions) of the API + public var methods: [Surface_V1_Method] = [] + + public var unknownFields = SwiftProtobuf.UnknownStorage() + + public init() {} + + /// Used by the decoding initializers in the SwiftProtobuf library, not generally + /// used directly. `init(serializedData:)`, `init(jsonUTF8Data:)`, and other decoding + /// initializers are defined in the SwiftProtobuf library. See the Message and + /// Message+*Additions` files. + public mutating func decodeMessage(decoder: inout D) throws { + while let fieldNumber = try decoder.nextFieldNumber() { + switch fieldNumber { + case 1: try decoder.decodeSingularStringField(value: &self.name) + case 2: try decoder.decodeRepeatedMessageField(value: &self.types) + case 3: try decoder.decodeRepeatedMessageField(value: &self.methods) + default: break + } + } + } + + /// Used by the encoding methods of the SwiftProtobuf library, not generally + /// used directly. `Message.serializedData()`, `Message.jsonUTF8Data()`, and + /// other serializer methods are defined in the SwiftProtobuf library. See the + /// `Message` and `Message+*Additions` files. + public func traverse(visitor: inout V) throws { + if !self.name.isEmpty { + try visitor.visitSingularStringField(value: self.name, fieldNumber: 1) + } + if !self.types.isEmpty { + try visitor.visitRepeatedMessageField(value: self.types, fieldNumber: 2) + } + if !self.methods.isEmpty { + try visitor.visitRepeatedMessageField(value: self.methods, fieldNumber: 3) + } + try unknownFields.traverse(visitor: &visitor) + } +} + +// MARK: - Code below here is support for the SwiftProtobuf runtime. 
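Surface_V1_Model is what a code-generator plugin actually consumes: a flat list of types, their fields, and the API's methods, already annotated with language-friendly names (typeName, fieldName, nativeType, and so on). A rough sketch of walking the model follows; it is illustrative only, not part of the vendored file, and the output format is made up.

// A hypothetical traversal of a surface model, using only fields defined above.
func describe(model: Surface_V1_Model) -> String {
  var out = "// API: \(model.name)\n"
  for modelType in model.types {
    // typeName is the generator-chosen name; fall back to the API name if unset.
    out += "struct \(modelType.typeName.isEmpty ? modelType.name : modelType.typeName) {\n"
    for field in modelType.fields {
      out += "  var \(field.fieldName): \(field.nativeType)\n"
    }
    out += "}\n"
  }
  for method in model.methods {
    out += "// \(method.method) \(method.path): \(method.parametersTypeName) -> \(method.responsesTypeName)\n"
  }
  return out
}

A plugin would typically receive this model through the Request's surface field defined in plugin.pb.swift rather than construct it itself.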
+ +fileprivate let _protobuf_package = "surface.v1" + +extension Surface_V1_FieldKind: SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 0: .same(proto: "SCALAR"), + 1: .same(proto: "MAP"), + 2: .same(proto: "ARRAY"), + 3: .same(proto: "REFERENCE"), + ] +} + +extension Surface_V1_TypeKind: SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 0: .same(proto: "STRUCT"), + 1: .same(proto: "OBJECT"), + ] +} + +extension Surface_V1_Position: SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 0: .same(proto: "BODY"), + 1: .same(proto: "HEADER"), + 2: .same(proto: "FORMDATA"), + 3: .same(proto: "QUERY"), + 4: .same(proto: "PATH"), + ] +} + +extension Surface_V1_Field: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "type"), + 3: .same(proto: "kind"), + 4: .same(proto: "format"), + 5: .same(proto: "position"), + 6: .same(proto: "nativeType"), + 7: .same(proto: "fieldName"), + 8: .same(proto: "parameterName"), + 9: .same(proto: "serialize"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Field) -> Bool { + if self.name != other.name {return false} + if self.type != other.type {return false} + if self.kind != other.kind {return false} + if self.format != other.format {return false} + if self.position != other.position {return false} + if self.nativeType != other.nativeType {return false} + if self.fieldName != other.fieldName {return false} + if self.parameterName != other.parameterName {return false} + if self.serialize != other.serialize {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Surface_V1_Type: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "name"), + 2: .same(proto: "kind"), + 3: .same(proto: "description"), + 4: .same(proto: "contentType"), + 5: .same(proto: "fields"), + 6: .same(proto: "typeName"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Type) -> Bool { + if self.name != other.name {return false} + if self.kind != other.kind {return false} + if self.description_p != other.description_p {return false} + if self.contentType != other.contentType {return false} + if self.fields != other.fields {return false} + if self.typeName != other.typeName {return false} + if unknownFields != other.unknownFields {return false} + return true + } +} + +extension Surface_V1_Method: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { + public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ + 1: .same(proto: "operation"), + 2: .same(proto: "path"), + 3: .same(proto: "method"), + 4: .same(proto: "description"), + 5: .same(proto: "name"), + 6: .same(proto: "handlerName"), + 7: .same(proto: "processorName"), + 8: .same(proto: "clientName"), + 9: .same(proto: "parametersTypeName"), + 10: .same(proto: "responsesTypeName"), + ] + + public func _protobuf_generated_isEqualTo(other: Surface_V1_Method) -> Bool { + if self.operation != other.operation {return false} + if self.path != other.path {return false} + if self.method != other.method {return false} + if self.description_p != other.description_p {return false} + if self.name != other.name {return false} + if 
self.handlerName != other.handlerName {return false}
+    if self.processorName != other.processorName {return false}
+    if self.clientName != other.clientName {return false}
+    if self.parametersTypeName != other.parametersTypeName {return false}
+    if self.responsesTypeName != other.responsesTypeName {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
+
+extension Surface_V1_Model: SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
+  public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
+    1: .same(proto: "name"),
+    2: .same(proto: "types"),
+    3: .same(proto: "methods"),
+  ]
+
+  public func _protobuf_generated_isEqualTo(other: Surface_V1_Model) -> Bool {
+    if self.name != other.name {return false}
+    if self.types != other.types {return false}
+    if self.methods != other.methods {return false}
+    if unknownFields != other.unknownFields {return false}
+    return true
+  }
+}
diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/gnostic-swift-sample/io.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/gnostic-swift-sample/io.swift
new file mode 100644
index 000000000..0095ad40c
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/gnostic-swift-sample/io.swift
@@ -0,0 +1,100 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import Foundation
+
+// The I/O code below is derived from Apple's swift-protobuf project.
+// https://github.com/apple/swift-protobuf
+// BEGIN swift-protobuf derivation
+
+#if os(Linux)
+  import Glibc
+#else
+  import Darwin.C
+#endif
+
+enum PluginError: Error {
+  /// Raised for any errors reading the input
+  case readFailure
+}
+
+// Alias clib's write() so Stdout.write(bytes:) can call it.
+private let _write = write
+
+class Stdin {
+  static func readall() throws -> Data {
+    let fd: Int32 = 0
+    let buffSize = 32
+    var buff = [UInt8]()
+    while true {
+      var fragment = [UInt8](repeating: 0, count: buffSize)
+      let count = read(fd, &fragment, buffSize)
+      if count < 0 {
+        throw PluginError.readFailure
+      }
+      if count < buffSize {
+        buff += fragment[0..<count]
+        return Data(bytes: buff)
+      }
+      buff += fragment
+    }
+  }
+}
+
+class Stdout {
+  static func write(bytes: Data) {
+    bytes.withUnsafeBytes { (p: UnsafePointer<UInt8>) -> () in
+      _ = _write(1, p, bytes.count)
+    }
+  }
+}
+
+struct CodePrinter {
+  private(set) var content = ""
+  private var currentIndentDepth = 0
+  private var currentIndent = ""
+  private var atLineStart = true
+
+  mutating func print(_ text: String...) {
+    for t in text {
+      for c in t.characters {
+        if c == "\n" {
+          content.append(c)
+          atLineStart = true
+        } else {
+          if atLineStart {
+            content.append(currentIndent)
+            atLineStart = false
+          }
+          content.append(c)
+        }
+      }
+    }
+  }
+
+  mutating private func resetIndent() {
+    currentIndent = (0..<currentIndentDepth).map { (Int) ->
String in return " " } .joined(separator:"") + } + + mutating func indent() { + currentIndentDepth += 1 + resetIndent() + } + mutating func outdent() { + currentIndentDepth -= 1 + resetIndent() + } +} + +// END swift-protobuf derivation diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/gnostic-swift-sample/main.swift b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/gnostic-swift-sample/main.swift new file mode 100644 index 000000000..e7bec7a4a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/Sources/gnostic-swift-sample/main.swift @@ -0,0 +1,72 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import Gnostic + +func printDocument(document:Openapi_V2_Document, + name:String) -> String { + var code = CodePrinter() + code.print("READING \(name)\n") + code.print("Swagger: \(document.swagger)\n") + code.print("Host: \(document.host)\n") + code.print("BasePath: \(document.basePath)\n") + if document.hasInfo { + code.print("Info:\n") + code.indent() + if document.info.title != "" { + code.print("Title: \(document.info.title)\n") + } + if document.info.description_p != "" { + code.print("Description: \(document.info.description_p)\n") + } + if document.info.version != "" { + code.print("Version: \(document.info.version)\n") + } + code.outdent() + } + code.print("Paths:\n") + code.indent() + for pair in document.paths.path { + let v = pair.value + if v.hasGet { + code.print("GET \(pair.name)\n") + } + if v.hasPost { + code.print("POST \(pair.name)\n") + } + } + code.outdent() + return code.content +} + +func main() throws { + var response = Gnostic_Plugin_V1_Response() + let rawRequest = try Stdin.readall() + let request = try Gnostic_Plugin_V1_Request(serializedData: rawRequest) + if request.hasOpenapi2 { + let document = request.openapi2 + let report = printDocument(document:document, name:request.sourceName) + if let reportData = report.data(using:.utf8) { + var file = Gnostic_Plugin_V1_File() + file.name = "report.txt" + file.data = reportData + response.files.append(file) + } + } + let serializedResponse = try response.serializedData() + Stdout.write(bytes: serializedResponse) +} + +try main() diff --git a/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/compile-protos b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/compile-protos new file mode 100644 index 000000000..ef7cc52f6 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/gnostic-swift-sample/compile-protos @@ -0,0 +1,52 @@ +#!/bin/sh +# +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Use this script to run protoc and swift-proto to generate +# support code for gnostic protos. + +GNOSTIC=$GOPATH/src/github.com/googleapis/gnostic + +PROTOS=( + plugins/plugin.proto + OpenAPIv2/OpenAPIv2.proto + OpenAPIv3/OpenAPIv3.proto + surface/surface.proto + discovery/discovery.proto +) + +# remove old compiled pb files +rm -rf Sources/Gnostic/*.pb.swift + +# remove any prior compilations +rm -rf Sources/Gnostic/github.com + +# compile protos +for proto in "${PROTOS[@]}" +do + echo "COMPILING $proto" + protoc $GNOSTIC/$proto \ + --swift_opt=Visibility=Public \ + --swift_out=Sources/Gnostic \ + --proto_path=$GOPATH/src + +# relocate compiled protos +find Sources/Gnostic/github.com -name "*.pb.swift" -exec mv {} Sources/Gnostic \; + +# remove scaffolding of compilation +rm -rf Sources/Gnostic/github.com + +done + diff --git a/vendor/github.com/googleapis/gnostic/plugins/plugin.pb.go b/vendor/github.com/googleapis/gnostic/plugins/plugin.pb.go new file mode 100644 index 000000000..2cc3d0c5e --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/plugin.pb.go @@ -0,0 +1,296 @@ +// Code generated by protoc-gen-go. +// source: plugin.proto +// DO NOT EDIT! + +/* +Package gnostic_plugin_v1 is a generated protocol buffer package. + +It is generated from these files: + plugin.proto + +It has these top-level messages: + Version + Parameter + Request + Response + File +*/ +package gnostic_plugin_v1 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" +import openapi_v3 "github.com/googleapis/gnostic/OpenAPIv3" +import discovery_v1 "github.com/googleapis/gnostic/discovery" +import surface_v1 "github.com/googleapis/gnostic/surface" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The version number of gnostic. +type Version struct { + Major int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"` + Minor int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"` + Patch int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"` + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. 
+ Suffix string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Version) GetMajor() int32 { + if m != nil { + return m.Major + } + return 0 +} + +func (m *Version) GetMinor() int32 { + if m != nil { + return m.Minor + } + return 0 +} + +func (m *Version) GetPatch() int32 { + if m != nil { + return m.Patch + } + return 0 +} + +func (m *Version) GetSuffix() string { + if m != nil { + return m.Suffix + } + return "" +} + +// A parameter passed to the plugin from (or through) gnostic. +type Parameter struct { + // The name of the parameter as specified in the option string + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // The parameter value as specified in the option string + Value string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *Parameter) Reset() { *m = Parameter{} } +func (m *Parameter) String() string { return proto.CompactTextString(m) } +func (*Parameter) ProtoMessage() {} +func (*Parameter) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Parameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Parameter) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// An encoded Request is written to the plugin's stdin. +type Request struct { + // filename or URL of the original source document + SourceName string `protobuf:"bytes,1,opt,name=source_name,json=sourceName" json:"source_name,omitempty"` + // Output path specified in the plugin invocation. + OutputPath string `protobuf:"bytes,2,opt,name=output_path,json=outputPath" json:"output_path,omitempty"` + // Plugin parameters parsed from the invocation string. + Parameters []*Parameter `protobuf:"bytes,3,rep,name=parameters" json:"parameters,omitempty"` + // The version number of gnostic. 
+ CompilerVersion *Version `protobuf:"bytes,4,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` + // OpenAPI v2 API representation + Openapi2 *openapi_v2.Document `protobuf:"bytes,5,opt,name=openapi2" json:"openapi2,omitempty"` + // OpenAPI v3 API representation + Openapi3 *openapi_v3.Document `protobuf:"bytes,6,opt,name=openapi3" json:"openapi3,omitempty"` + // Discovery API representation + Discovery *discovery_v1.Document `protobuf:"bytes,7,opt,name=discovery" json:"discovery,omitempty"` + // generated code surface representation + Surface *surface_v1.Model `protobuf:"bytes,8,opt,name=surface" json:"surface,omitempty"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *Request) GetSourceName() string { + if m != nil { + return m.SourceName + } + return "" +} + +func (m *Request) GetOutputPath() string { + if m != nil { + return m.OutputPath + } + return "" +} + +func (m *Request) GetParameters() []*Parameter { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Request) GetCompilerVersion() *Version { + if m != nil { + return m.CompilerVersion + } + return nil +} + +func (m *Request) GetOpenapi2() *openapi_v2.Document { + if m != nil { + return m.Openapi2 + } + return nil +} + +func (m *Request) GetOpenapi3() *openapi_v3.Document { + if m != nil { + return m.Openapi3 + } + return nil +} + +func (m *Request) GetDiscovery() *discovery_v1.Document { + if m != nil { + return m.Discovery + } + return nil +} + +func (m *Request) GetSurface() *surface_v1.Model { + if m != nil { + return m.Surface + } + return nil +} + +// The plugin writes an encoded Response to stdout. +type Response struct { + // Error message. If non-empty, the plugin failed. + // The plugin process should exit with status code zero + // even if it reports an error in this way. + // + // This should be used to indicate errors which prevent the plugin from + // operating as intended. Errors which indicate a problem in openapic + // itself -- such as the input Document being unparseable -- should be + // reported by writing a message to stderr and exiting with a non-zero + // status code. + Errors []string `protobuf:"bytes,1,rep,name=errors" json:"errors,omitempty"` + // file output, each file will be written by openapic to an appropriate location. + Files []*File `protobuf:"bytes,2,rep,name=files" json:"files,omitempty"` +} + +func (m *Response) Reset() { *m = Response{} } +func (m *Response) String() string { return proto.CompactTextString(m) } +func (*Response) ProtoMessage() {} +func (*Response) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *Response) GetErrors() []string { + if m != nil { + return m.Errors + } + return nil +} + +func (m *Response) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +// File describes a file generated by a plugin. 
+type File struct { + // name of the file + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // data to be written to the file + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (m *File) Reset() { *m = File{} } +func (m *File) String() string { return proto.CompactTextString(m) } +func (*File) ProtoMessage() {} +func (*File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *File) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *File) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func init() { + proto.RegisterType((*Version)(nil), "gnostic.plugin.v1.Version") + proto.RegisterType((*Parameter)(nil), "gnostic.plugin.v1.Parameter") + proto.RegisterType((*Request)(nil), "gnostic.plugin.v1.Request") + proto.RegisterType((*Response)(nil), "gnostic.plugin.v1.Response") + proto.RegisterType((*File)(nil), "gnostic.plugin.v1.File") +} + +func init() { proto.RegisterFile("plugin.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 499 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x4f, 0x8f, 0xd3, 0x3c, + 0x10, 0xc6, 0xd5, 0xa6, 0x7f, 0xa7, 0xfb, 0xbe, 0xb0, 0xd6, 0x6a, 0xb1, 0x56, 0x48, 0x5b, 0xf5, + 0x42, 0x25, 0x84, 0x4b, 0x1b, 0x10, 0x17, 0x2e, 0xac, 0x80, 0x15, 0x07, 0x76, 0x83, 0x0f, 0x5c, + 0x2b, 0x6f, 0xea, 0xa6, 0x46, 0x49, 0x6c, 0x6c, 0x27, 0x82, 0x4f, 0xc2, 0x9d, 0x4f, 0x8a, 0x62, + 0x27, 0x4d, 0xc5, 0x56, 0x68, 0x4f, 0x9d, 0xe7, 0xd1, 0xfc, 0x9e, 0xb8, 0x33, 0x03, 0x27, 0x2a, + 0x2d, 0x12, 0x91, 0x13, 0xa5, 0xa5, 0x95, 0xe8, 0x34, 0xc9, 0xa5, 0xb1, 0x22, 0x26, 0xb5, 0x5b, + 0x2e, 0x2f, 0xde, 0x24, 0xc2, 0xee, 0x8a, 0x3b, 0x12, 0xcb, 0x6c, 0x91, 0x48, 0x99, 0xa4, 0x9c, + 0x29, 0x61, 0x16, 0x75, 0xe3, 0xe2, 0x56, 0xf1, 0xfc, 0x5d, 0xf4, 0xa9, 0x5c, 0xb5, 0x95, 0xcf, + 0x7a, 0x28, 0x18, 0xb6, 0xd5, 0xc3, 0xc0, 0x8d, 0x30, 0xb1, 0x2c, 0xb9, 0xfe, 0xd9, 0x56, 0x35, + 0x18, 0xfe, 0x1b, 0x34, 0x85, 0xde, 0xb2, 0x98, 0x37, 0xbf, 0x1e, 0x9a, 0xc5, 0x30, 0xfc, 0xca, + 0xb5, 0x11, 0x32, 0x47, 0x67, 0xd0, 0xcf, 0xd8, 0x37, 0xa9, 0x71, 0x67, 0xda, 0x99, 0xf7, 0xa9, + 0x17, 0xce, 0x15, 0xb9, 0xd4, 0xb8, 0x5b, 0xbb, 0x95, 0xa8, 0x5c, 0xc5, 0x6c, 0xbc, 0xc3, 0x81, + 0x77, 0x9d, 0x40, 0xe7, 0x30, 0x30, 0xc5, 0x76, 0x2b, 0x7e, 0xe0, 0xde, 0xb4, 0x33, 0x1f, 0xd3, + 0x5a, 0xcd, 0x5e, 0xc3, 0x38, 0x62, 0x9a, 0x65, 0xdc, 0x72, 0x8d, 0x10, 0xf4, 0x72, 0x96, 0x71, + 0xf7, 0x95, 0x31, 0x75, 0x75, 0x15, 0x57, 0xb2, 0xb4, 0xe0, 0xee, 0x23, 0x63, 0xea, 0xc5, 0xec, + 0x57, 0x00, 0x43, 0xca, 0xbf, 0x17, 0xdc, 0x58, 0x74, 0x09, 0x13, 0x23, 0x0b, 0x1d, 0xf3, 0xf5, + 0x01, 0x0c, 0xde, 0xba, 0xa9, 0x22, 0x2e, 0x61, 0x22, 0x0b, 0xab, 0x0a, 0xbb, 0x56, 0xcc, 0xee, + 0xea, 0x20, 0xf0, 0x56, 0xc4, 0xec, 0x0e, 0xbd, 0x05, 0x50, 0xcd, 0x23, 0x0c, 0x0e, 0xa6, 0xc1, + 0x7c, 0xb2, 0x7a, 0x4a, 0xee, 0x6d, 0x9c, 0xec, 0x5f, 0x4a, 0x0f, 0xfa, 0xd1, 0x07, 0x78, 0x1c, + 0xcb, 0x4c, 0x89, 0x94, 0xeb, 0x75, 0xe9, 0x07, 0xe6, 0xfe, 0xe4, 0x64, 0x75, 0x71, 0x24, 0xa3, + 0x1e, 0x29, 0x7d, 0xd4, 0x30, 0xcd, 0x8c, 0x5f, 0xc2, 0x48, 0x2a, 0x9e, 0x33, 0x25, 0x56, 0xb8, + 0xef, 0xf0, 0x33, 0x52, 0x1b, 0xa4, 0x5c, 0x91, 0xf7, 0x32, 0x2e, 0x32, 0x9e, 0x5b, 0xba, 0xef, + 0x3a, 0x20, 0x42, 0x3c, 0xf8, 0x9b, 0x08, 0xef, 0x13, 0x21, 0x7a, 0x05, 0xe3, 0xfd, 0x69, 0xe0, + 0xa1, 0x43, 0xce, 0x49, 0x7b, 0x2c, 0xe5, 0xb2, 0x85, 0xda, 0x46, 0xf4, 0x1c, 0x86, 0xf5, 0x65, + 0xe0, 0x91, 0x63, 0x4e, 0x49, 0x73, 0x29, 0xe5, 0x92, 0x7c, 
0x96, 0x1b, 0x9e, 0xd2, 0xa6, 0x63, + 0xf6, 0x05, 0x46, 0x94, 0x1b, 0x25, 0x73, 0xc3, 0xab, 0xa5, 0x73, 0xad, 0xa5, 0x36, 0xb8, 0x33, + 0x0d, 0xaa, 0xa5, 0x7b, 0x85, 0x5e, 0x40, 0x7f, 0x2b, 0x52, 0x6e, 0x70, 0xd7, 0x8d, 0xfa, 0xc9, + 0x91, 0x31, 0x7d, 0x14, 0x29, 0xa7, 0xbe, 0x6b, 0x46, 0xa0, 0x57, 0xc9, 0xa3, 0xe7, 0x81, 0xa0, + 0xb7, 0x61, 0x96, 0xb9, 0xa5, 0x9e, 0x50, 0x57, 0x5f, 0x3d, 0x83, 0xff, 0xa5, 0x4e, 0xf6, 0xa1, + 0xe5, 0xf2, 0xea, 0xbf, 0x6b, 0x5f, 0x47, 0x2e, 0x3f, 0xea, 0xfc, 0xee, 0x06, 0xd7, 0x37, 0xb7, + 0x77, 0x03, 0x77, 0xe8, 0xe1, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x40, 0x96, 0xff, 0x27, 0xeb, + 0x03, 0x00, 0x00, +} diff --git a/vendor/github.com/googleapis/gnostic/plugins/plugin.proto b/vendor/github.com/googleapis/gnostic/plugins/plugin.proto new file mode 100644 index 000000000..b546f36d7 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/plugins/plugin.proto @@ -0,0 +1,129 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gnostic can be extended with plugins. +// A plugin is uset a program that reads a Request from stdin +// and writes a Response to stdout. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "gnostic_$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to gnostic. + +syntax = "proto3"; + +import "github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto"; +import "github.com/googleapis/gnostic/OpenAPIv3/OpenAPIv3.proto"; +import "github.com/googleapis/gnostic/discovery/discovery.proto"; +import "github.com/googleapis/gnostic/surface/surface.proto"; + +package gnostic.plugin.v1; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "GnosticPlugin"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.gnostic.v1"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +// +option objc_class_prefix = "GNO"; + +// The version number of gnostic. +message Version { + int32 major = 1; + int32 minor = 2; + int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". 
It should + // be empty for mainline stable releases. + string suffix = 4; +} + +// A parameter passed to the plugin from (or through) gnostic. +message Parameter { + // The name of the parameter as specified in the option string + string name = 1; + + // The parameter value as specified in the option string + string value = 2; +} + +// An encoded Request is written to the plugin's stdin. +message Request { + + // filename or URL of the original source document + string source_name = 1; + + // Output path specified in the plugin invocation. + string output_path = 2; + + // Plugin parameters parsed from the invocation string. + repeated Parameter parameters = 3; + + // The version number of gnostic. + Version compiler_version = 4; + + // OpenAPI v2 API representation + openapi.v2.Document openapi2 = 5; + + // OpenAPI v3 API representation + openapi.v3.Document openapi3 = 6; + + // Discovery API representation + discovery.v1.Document discovery = 7; + + // generated code surface representation + surface.v1.Model surface = 8; +} + +// The plugin writes an encoded Response to stdout. +message Response { + + // Error message. If non-empty, the plugin failed. + // The plugin process should exit with status code zero + // even if it reports an error in this way. + // + // This should be used to indicate errors which prevent the plugin from + // operating as intended. Errors which indicate a problem in openapic + // itself -- such as the input Document being unparseable -- should be + // reported by writing a message to stderr and exiting with a non-zero + // status code. + repeated string errors = 1; + + // file output, each file will be written by openapic to an appropriate location. + repeated File files = 2; +} + +// File describes a file generated by a plugin. +message File { + + // name of the file + string name = 1; + + // data to be written to the file + bytes data = 2; +} + + diff --git a/vendor/github.com/googleapis/gnostic/printer/README.md b/vendor/github.com/googleapis/gnostic/printer/README.md new file mode 100644 index 000000000..ba2f8f2c0 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/printer/README.md @@ -0,0 +1,3 @@ +# printer + +This directory contains code for generating files of code. diff --git a/vendor/github.com/googleapis/gnostic/printer/code.go b/vendor/github.com/googleapis/gnostic/printer/code.go new file mode 100644 index 000000000..1afc252f1 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/printer/code.go @@ -0,0 +1,57 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package printer provides support for writing generated code. +package printer + +import ( + "fmt" +) + +const indentation = " " + +// Code represents a file of code to be printed. +type Code struct { + text string + indent int +} + +// Print adds a line of code using the current indentation. Accepts printf-style format strings and arguments. 
+func (c *Code) Print(args ...interface{}) {
+	if len(args) > 0 {
+		for i := 0; i < c.indent; i++ {
+			c.text += indentation
+		}
+		c.text += fmt.Sprintf(args[0].(string), args[1:]...)
+	}
+	c.text += "\n"
+}
+
+// String returns the accumulated code as a string.
+func (c *Code) String() string {
+	return c.text
+}
+
+// Indent adds one level of indentation.
+func (c *Code) Indent() {
+	c.indent++
+}
+
+// Outdent removes one level of indentation.
+func (c *Code) Outdent() {
+	c.indent--
+	if c.indent < 0 {
+		c.indent = 0
+	}
+}
diff --git a/vendor/github.com/googleapis/gnostic/surface/README.md b/vendor/github.com/googleapis/gnostic/surface/README.md
new file mode 100644
index 000000000..6870ccfce
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/surface/README.md
@@ -0,0 +1,10 @@
+# API Code Surface
+
+This directory contains a Protocol Buffer-language model
+suitable for generating support code for calling and
+implementing an API.
+
+It can be generated from other formats read by gnostic
+and passed to code generator plugins to assist them by
+providing a preprocessed API description that is easier
+to generate.
\ No newline at end of file
diff --git a/vendor/github.com/googleapis/gnostic/surface/field.go b/vendor/github.com/googleapis/gnostic/surface/field.go
new file mode 100644
index 000000000..da540a5f6
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/surface/field.go
@@ -0,0 +1,20 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package surface_v1
+
+// ServiceType returns the Type associated with a field.
+func (f *Field) ServiceType(m *Model) *Type {
+	return m.TypeWithTypeName(f.NativeType)
+}
diff --git a/vendor/github.com/googleapis/gnostic/surface/model.go b/vendor/github.com/googleapis/gnostic/surface/model.go
new file mode 100644
index 000000000..943d2dd54
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/surface/model.go
@@ -0,0 +1,58 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package surface_v1
+
+import (
+	"path"
+	"strings"
+)
+
+func (m *Model) addType(t *Type) {
+	m.Types = append(m.Types, t)
+}
+
+func (m *Model) addMethod(method *Method) {
+	m.Methods = append(m.Methods, method)
+}
+
+func (m *Model) TypeWithTypeName(name string) *Type {
+	if name == "" {
+		return nil
+	}
+	for _, t := range m.Types {
+		if t.TypeName == name {
+			return t
+		}
+	}
+	return nil
+}
+
+func generateOperationName(method, path string) string {
+	filteredPath := strings.Replace(path, "/", "_", -1)
+	filteredPath = strings.Replace(filteredPath, ".", "_", -1)
+	filteredPath = strings.Replace(filteredPath, "{", "", -1)
+	filteredPath = strings.Replace(filteredPath, "}", "", -1)
+	return strings.Title(method) + filteredPath
+}
+
+func sanitizeOperationName(name string) string {
+	name = strings.Title(name)
+	name = strings.Replace(name, ".", "_", -1)
+	return name
+}
+
+func typeForRef(ref string) (typeName string) {
+	return path.Base(ref)
+}
diff --git a/vendor/github.com/googleapis/gnostic/surface/model_openapiv2.go b/vendor/github.com/googleapis/gnostic/surface/model_openapiv2.go
new file mode 100644
index 000000000..14c4eccf0
--- /dev/null
+++ b/vendor/github.com/googleapis/gnostic/surface/model_openapiv2.go
@@ -0,0 +1,251 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package surface_v1
+
+import (
+	"fmt"
+
+	openapiv2 "github.com/googleapis/gnostic/OpenAPIv2"
+)
+
+// NewModelFromOpenAPI2 builds a model of an API service for use in code generation.
+func NewModelFromOpenAPI2(document *openapiv2.Document) (*Model, error) {
+	return newOpenAPI2Builder().buildModel(document)
+}
+
+type OpenAPI2Builder struct {
+	model *Model
+}
+
+func newOpenAPI2Builder() *OpenAPI2Builder {
+	return &OpenAPI2Builder{model: &Model{}}
+}
+
+func (b *OpenAPI2Builder) buildModel(document *openapiv2.Document) (*Model, error) {
+	// Set model properties from passed-in document.
+	b.model.Name = document.Info.Title
+	b.model.Types = make([]*Type, 0)
+	b.model.Methods = make([]*Method, 0)
+	err := b.build(document)
+	if err != nil {
+		return nil, err
+	}
+	return b.model, nil
+}
+
+// build builds an API service description, preprocessing its types and methods for code generation.
+func (b *OpenAPI2Builder) build(document *openapiv2.Document) (err error) {
+	// Collect service type descriptions from Definitions section.
+	if document.Definitions != nil {
+		for _, pair := range document.Definitions.AdditionalProperties {
+			t, err := b.buildTypeFromDefinition(pair.Name, pair.Value)
+			if err != nil {
+				return err
+			}
+			b.model.addType(t)
+		}
+	}
+	// Collect service method descriptions from Paths section.
+ for _, pair := range document.Paths.Path { + v := pair.Value + if v.Get != nil { + b.buildMethodFromOperation(v.Get, "GET", pair.Name) + } + if v.Post != nil { + b.buildMethodFromOperation(v.Post, "POST", pair.Name) + } + if v.Put != nil { + b.buildMethodFromOperation(v.Put, "PUT", pair.Name) + } + if v.Delete != nil { + b.buildMethodFromOperation(v.Delete, "DELETE", pair.Name) + } + } + return err +} + +func (b *OpenAPI2Builder) buildTypeFromDefinition(name string, schema *openapiv2.Schema) (t *Type, err error) { + t = &Type{} + t.Name = name + t.Description = "implements the service definition of " + name + t.Fields = make([]*Field, 0) + if schema.Properties != nil { + if len(schema.Properties.AdditionalProperties) > 0 { + // If the schema has properties, generate a struct. + t.Kind = TypeKind_STRUCT + } + for _, pair2 := range schema.Properties.AdditionalProperties { + var f Field + f.Name = pair2.Name + f.Kind, f.Type, f.Format = b.typeForSchema(pair2.Value) + f.Serialize = true + t.addField(&f) + } + } + if len(t.Fields) == 0 { + if schema.AdditionalProperties != nil { + // If the schema has no fixed properties and additional properties of a specified type, + // generate a map pointing to objects of that type. + t.Kind = TypeKind_OBJECT + t.ContentType = typeForRef(schema.AdditionalProperties.GetSchema().XRef) + } + } + return t, err +} + +func (b *OpenAPI2Builder) buildMethodFromOperation(op *openapiv2.Operation, method string, path string) (err error) { + var m Method + m.Operation = op.OperationId + m.Path = path + m.Method = method + m.Description = op.Description + + m.Name = sanitizeOperationName(op.OperationId) + if m.Name == "" { + m.Name = generateOperationName(method, path) + } + + m.ParametersTypeName, err = b.buildTypeFromParameters(m.Name, op.Parameters) + m.ResponsesTypeName, err = b.buildTypeFromResponses(&m, m.Name, op.Responses) + b.model.addMethod(&m) + return err +} + +func (b *OpenAPI2Builder) buildTypeFromParameters(name string, parameters []*openapiv2.ParametersItem) (typeName string, err error) { + t := &Type{} + t.Name = name + "Parameters" + t.Description = t.Name + " holds parameters to " + name + t.Kind = TypeKind_STRUCT + t.Fields = make([]*Field, 0) + for _, parametersItem := range parameters { + var f Field + f.Type = fmt.Sprintf("%+v", parametersItem) + parameter := parametersItem.GetParameter() + if parameter != nil { + bodyParameter := parameter.GetBodyParameter() + if bodyParameter != nil { + f.Name = bodyParameter.Name + if bodyParameter.Schema != nil { + f.Kind, f.Type, f.Format = b.typeForSchema(bodyParameter.Schema) + } + f.Position = Position_BODY + } + nonBodyParameter := parameter.GetNonBodyParameter() + if nonBodyParameter != nil { + headerParameter := nonBodyParameter.GetHeaderParameterSubSchema() + if headerParameter != nil { + f.Name = headerParameter.Name + f.Type = headerParameter.Type + f.Position = Position_HEADER + } + formDataParameter := nonBodyParameter.GetFormDataParameterSubSchema() + if formDataParameter != nil { + f.Name = formDataParameter.Name + f.Type = formDataParameter.Type + f.Position = Position_FORMDATA + } + queryParameter := nonBodyParameter.GetQueryParameterSubSchema() + if queryParameter != nil { + f.Name = queryParameter.Name + f.Type = queryParameter.Type + f.Position = Position_QUERY + } + pathParameter := nonBodyParameter.GetPathParameterSubSchema() + if pathParameter != nil { + f.Name = pathParameter.Name + f.Type = pathParameter.Type + f.Format = pathParameter.Format + f.Position = Position_PATH + } + } + 
f.Serialize = true + t.addField(&f) + } + } + if len(t.Fields) > 0 { + b.model.addType(t) + return t.Name, err + } + return "", err +} + +func (b *OpenAPI2Builder) buildTypeFromResponses(m *Method, name string, responses *openapiv2.Responses) (typeName string, err error) { + t := &Type{} + t.Name = name + "Responses" + t.Description = t.Name + " holds responses of " + name + t.Kind = TypeKind_STRUCT + t.Fields = make([]*Field, 0) + + for _, responseCode := range responses.ResponseCode { + var f Field + f.Name = responseCode.Name + f.Serialize = false + response := responseCode.Value.GetResponse() + if response != nil && response.Schema != nil && response.Schema.GetSchema() != nil { + f.Kind, f.Type, f.Format = b.typeForSchema(response.Schema.GetSchema()) + f.Kind = FieldKind_REFERENCE + t.addField(&f) + } + } + + if len(t.Fields) > 0 { + b.model.addType(t) + return t.Name, err + } + return "", err +} + +func (b *OpenAPI2Builder) typeForSchema(schema *openapiv2.Schema) (kind FieldKind, typeName, format string) { + ref := schema.XRef + format = schema.Format + if ref != "" { + return FieldKind_SCALAR, typeForRef(ref), format + } + if schema.Type != nil { + types := schema.Type.Value + if len(types) == 1 && types[0] == "string" { + return FieldKind_SCALAR, "string", format + } + if len(types) == 1 && types[0] == "integer" && format == "int32" { + return FieldKind_SCALAR, "integer", format + } + if len(types) == 1 && types[0] == "integer" { + return FieldKind_SCALAR, "integer", format + } + if len(types) == 1 && types[0] == "number" { + return FieldKind_SCALAR, "number", format + } + if len(types) == 1 && types[0] == "array" && schema.Items != nil { + // we have an array.., but of what? + items := schema.Items.Schema + if len(items) == 1 && items[0].XRef != "" { + return FieldKind_ARRAY, typeForRef(items[0].XRef), format + } + } + if len(types) == 1 && types[0] == "object" && schema.AdditionalProperties == nil { + return FieldKind_MAP, "object", format + } + } + if schema.AdditionalProperties != nil { + additionalProperties := schema.AdditionalProperties + if propertySchema := additionalProperties.GetSchema(); propertySchema != nil { + if ref := propertySchema.XRef; ref != "" { + return FieldKind_MAP, typeForRef(ref), format + } + } + } + // this function is incomplete... so return a string representing anything that we don't handle + return FieldKind_SCALAR, fmt.Sprintf("%v", schema), format +} diff --git a/vendor/github.com/googleapis/gnostic/surface/model_openapiv3.go b/vendor/github.com/googleapis/gnostic/surface/model_openapiv3.go new file mode 100644 index 000000000..e0892cf1b --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/surface/model_openapiv3.go @@ -0,0 +1,303 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package surface_v1 + +import ( + "errors" + "fmt" + "log" + + openapiv3 "github.com/googleapis/gnostic/OpenAPIv3" + "strings" +) + +// NewModelFromOpenAPIv3 builds a model of an API service for use in code generation. +func NewModelFromOpenAPI3(document *openapiv3.Document) (*Model, error) { + return newOpenAPI3Builder().buildModel(document) +} + +type OpenAPI3Builder struct { + model *Model +} + +func newOpenAPI3Builder() *OpenAPI3Builder { + return &OpenAPI3Builder{model: &Model{}} +} + +func (b *OpenAPI3Builder) buildModel(document *openapiv3.Document) (*Model, error) { + // Set model properties from passed-in document. + b.model.Name = document.Info.Title + b.model.Types = make([]*Type, 0) + b.model.Methods = make([]*Method, 0) + err := b.build(document) + if err != nil { + return nil, err + } + return b.model, nil +} + +// build builds an API service description, preprocessing its types and methods for code generation. +func (b *OpenAPI3Builder) build(document *openapiv3.Document) (err error) { + // Collect service type descriptions from Components/Schemas. + if document.Components != nil && document.Components.Schemas != nil { + for _, pair := range document.Components.Schemas.AdditionalProperties { + t, err := b.buildTypeFromSchemaOrReference(pair.Name, pair.Value) + if err != nil { + return err + } + if t != nil { + b.model.addType(t) + } + } + } + // Collect service method descriptions from each PathItem. + for _, pair := range document.Paths.Path { + b.buildMethodFromPathItem(pair.Name, pair.Value) + } + return err +} + +// buildTypeFromSchemaOrReference builds a service type description from a schema in the API description. +func (b *OpenAPI3Builder) buildTypeFromSchemaOrReference( + name string, + schemaOrReference *openapiv3.SchemaOrReference) (t *Type, err error) { + if schema := schemaOrReference.GetSchema(); schema != nil { + t = &Type{} + t.Name = name + t.Description = "implements the service definition of " + name + t.Fields = make([]*Field, 0) + if schema.Properties != nil { + if len(schema.Properties.AdditionalProperties) > 0 { + // If the schema has properties, generate a struct. + t.Kind = TypeKind_STRUCT + } + for _, pair2 := range schema.Properties.AdditionalProperties { + if schema := pair2.Value; schema != nil { + var f Field + f.Name = pair2.Name + f.Kind, f.Type, f.Format = b.typeForSchemaOrReference(schema) + f.Serialize = true + t.addField(&f) + } + } + } + if len(t.Fields) == 0 { + if schema.AdditionalProperties != nil { + // If the schema has no fixed properties and additional properties of a specified type, + // generate a map pointing to objects of that type. 
+ t.Kind = TypeKind_OBJECT + t.ContentType = typeForRef(schema.AdditionalProperties.GetSchemaOrReference().GetReference().GetXRef()) + } + } + return t, err + } else { + return nil, errors.New("unable to determine service type for referenced schema " + name) + } +} + +// buildMethodFromOperation builds a service method description +func (b *OpenAPI3Builder) buildMethodFromPathItem( + path string, + pathItem *openapiv3.PathItem) (err error) { + for _, method := range []string{"GET", "PUT", "POST", "DELETE", "OPTIONS", "HEAD", "PATCH", "TRACE"} { + var op *openapiv3.Operation + switch method { + case "GET": + op = pathItem.Get + case "PUT": + op = pathItem.Put + case "POST": + op = pathItem.Post + case "DELETE": + op = pathItem.Delete + case "OPTIONS": + op = pathItem.Options + case "HEAD": + op = pathItem.Head + case "PATCH": + op = pathItem.Patch + case "TRACE": + op = pathItem.Trace + } + if op != nil { + var m Method + m.Operation = op.OperationId + m.Path = path + m.Method = method + m.Name = sanitizeOperationName(op.OperationId) + if m.Name == "" { + m.Name = generateOperationName(method, path) + } + m.Description = op.Description + m.ParametersTypeName, err = b.buildTypeFromParameters(m.Name, op.Parameters, op.RequestBody) + m.ResponsesTypeName, err = b.buildTypeFromResponses(&m, m.Name, op.Responses) + b.model.addMethod(&m) + } + } + return err +} + +// buildTypeFromParameters builds a service type description from the parameters of an API method +func (b *OpenAPI3Builder) buildTypeFromParameters( + name string, + parameters []*openapiv3.ParameterOrReference, + requestBody *openapiv3.RequestBodyOrReference) (typeName string, err error) { + t := &Type{} + t.Name = name + "Parameters" + t.Description = t.Name + " holds parameters to " + name + t.Kind = TypeKind_STRUCT + t.Fields = make([]*Field, 0) + for _, parametersItem := range parameters { + var f Field + f.Type = fmt.Sprintf("%+v", parametersItem) + parameter := parametersItem.GetParameter() + if parameter != nil { + switch parameter.In { + case "body": + f.Position = Position_BODY + case "header": + f.Position = Position_HEADER + case "formdata": + f.Position = Position_FORMDATA + case "query": + f.Position = Position_QUERY + case "path": + f.Position = Position_PATH + } + f.Name = parameter.Name + if parameter.GetSchema() != nil && parameter.GetSchema() != nil { + f.Kind, f.Type, f.Format = b.typeForSchemaOrReference(parameter.GetSchema()) + } + f.Serialize = true + t.addField(&f) + } + } + if requestBody != nil { + content := requestBody.GetRequestBody().GetContent() + if content != nil { + for _, pair2 := range content.GetAdditionalProperties() { + var f Field + f.Position = Position_BODY + f.Kind, f.Type, f.Format = b.typeForSchemaOrReference(pair2.GetValue().GetSchema()) + f.Name = strings.ToLower(f.Type) // use the schema name as the parameter name, since none is directly specified + f.Serialize = true + t.addField(&f) + } + } + } + if len(t.Fields) > 0 { + b.model.addType(t) + return t.Name, err + } + return "", err +} + +// buildTypeFromResponses builds a service type description from the responses of an API method +func (b *OpenAPI3Builder) buildTypeFromResponses( + m *Method, + name string, + responses *openapiv3.Responses) (typeName string, err error) { + t := &Type{} + t.Name = name + "Responses" + t.Description = t.Name + " holds responses of " + name + t.Kind = TypeKind_STRUCT + t.Fields = make([]*Field, 0) + + addResponse := func(name string, value *openapiv3.ResponseOrReference) { + var f Field + f.Name = name + 
f.Serialize = false + response := value.GetResponse() + if response != nil && response.GetContent() != nil { + for _, pair2 := range response.GetContent().GetAdditionalProperties() { + f.Kind, f.Type, f.Format = b.typeForSchemaOrReference(pair2.GetValue().GetSchema()) + f.Kind = FieldKind_REFERENCE + t.addField(&f) + } + } + } + + for _, pair := range responses.ResponseOrReference { + addResponse(pair.Name, pair.Value) + } + if responses.Default != nil { + addResponse("default", responses.Default) + } + + if len(t.Fields) > 0 { + b.model.addType(t) + return t.Name, err + } + return "", err +} + +// typeForSchemaOrReference determines the language-specific type of a schema or reference +func (b *OpenAPI3Builder) typeForSchemaOrReference(value *openapiv3.SchemaOrReference) (kind FieldKind, typeName, format string) { + if value.GetSchema() != nil { + return b.typeForSchema(value.GetSchema()) + } + if value.GetReference() != nil { + return FieldKind_SCALAR, typeForRef(value.GetReference().XRef), "" + } + return FieldKind_SCALAR, "todo", "" +} + +// typeForSchema determines the language-specific type of a schema +func (b *OpenAPI3Builder) typeForSchema(schema *openapiv3.Schema) (kind FieldKind, typeName, format string) { + if schema.Type != "" { + format := schema.Format + switch schema.Type { + case "string": + return FieldKind_SCALAR, "string", format + case "integer": + return FieldKind_SCALAR, "integer", format + case "number": + return FieldKind_SCALAR, "number", format + case "boolean": + return FieldKind_SCALAR, "boolean", format + case "array": + if schema.Items != nil { + // we have an array.., but of what? + items := schema.Items.SchemaOrReference + if len(items) == 1 { + if items[0].GetReference().GetXRef() != "" { + return FieldKind_ARRAY, typeForRef(items[0].GetReference().GetXRef()), format + } else if items[0].GetSchema().Type == "string" { + return FieldKind_ARRAY, "string", format + } else if items[0].GetSchema().Type == "object" { + return FieldKind_ARRAY, "interface{}", format + } + } + } + case "object": + if schema.AdditionalProperties == nil { + return FieldKind_MAP, "object", format + } + default: + + } + } + if schema.AdditionalProperties != nil { + additionalProperties := schema.AdditionalProperties + if propertySchema := additionalProperties.GetSchemaOrReference().GetReference(); propertySchema != nil { + if ref := propertySchema.XRef; ref != "" { + return FieldKind_MAP, "map[string]" + typeForRef(ref), "" + } + } + } + // this function is incomplete... return a string representing anything that we don't handle + log.Printf("unimplemented: %v", schema) + return FieldKind_SCALAR, fmt.Sprintf("unimplemented: %v", schema), "" +} diff --git a/vendor/github.com/googleapis/gnostic/surface/surface.pb.go b/vendor/github.com/googleapis/gnostic/surface/surface.pb.go new file mode 100644 index 000000000..d9836c2df --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/surface/surface.pb.go @@ -0,0 +1,424 @@ +// Code generated by protoc-gen-go. +// source: surface.proto +// DO NOT EDIT! + +/* +Package surface_v1 is a generated protocol buffer package. + +It is generated from these files: + surface.proto + +It has these top-level messages: + Field + Type + Method + Model +*/ +package surface_v1 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type FieldKind int32 + +const ( + FieldKind_SCALAR FieldKind = 0 + FieldKind_MAP FieldKind = 1 + FieldKind_ARRAY FieldKind = 2 + FieldKind_REFERENCE FieldKind = 3 +) + +var FieldKind_name = map[int32]string{ + 0: "SCALAR", + 1: "MAP", + 2: "ARRAY", + 3: "REFERENCE", +} +var FieldKind_value = map[string]int32{ + "SCALAR": 0, + "MAP": 1, + "ARRAY": 2, + "REFERENCE": 3, +} + +func (x FieldKind) String() string { + return proto.EnumName(FieldKind_name, int32(x)) +} +func (FieldKind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +type TypeKind int32 + +const ( + TypeKind_STRUCT TypeKind = 0 + TypeKind_OBJECT TypeKind = 1 +) + +var TypeKind_name = map[int32]string{ + 0: "STRUCT", + 1: "OBJECT", +} +var TypeKind_value = map[string]int32{ + "STRUCT": 0, + "OBJECT": 1, +} + +func (x TypeKind) String() string { + return proto.EnumName(TypeKind_name, int32(x)) +} +func (TypeKind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +type Position int32 + +const ( + Position_BODY Position = 0 + Position_HEADER Position = 1 + Position_FORMDATA Position = 2 + Position_QUERY Position = 3 + Position_PATH Position = 4 +) + +var Position_name = map[int32]string{ + 0: "BODY", + 1: "HEADER", + 2: "FORMDATA", + 3: "QUERY", + 4: "PATH", +} +var Position_value = map[string]int32{ + "BODY": 0, + "HEADER": 1, + "FORMDATA": 2, + "QUERY": 3, + "PATH": 4, +} + +func (x Position) String() string { + return proto.EnumName(Position_name, int32(x)) +} +func (Position) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +// Field is a field in a definition and can be associated with +// a position in a request structure. 
+type Field struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Type string `protobuf:"bytes,2,opt,name=type" json:"type,omitempty"` + Kind FieldKind `protobuf:"varint,3,opt,name=kind,enum=surface.v1.FieldKind" json:"kind,omitempty"` + Format string `protobuf:"bytes,4,opt,name=format" json:"format,omitempty"` + Position Position `protobuf:"varint,5,opt,name=position,enum=surface.v1.Position" json:"position,omitempty"` + NativeType string `protobuf:"bytes,6,opt,name=nativeType" json:"nativeType,omitempty"` + FieldName string `protobuf:"bytes,7,opt,name=fieldName" json:"fieldName,omitempty"` + ParameterName string `protobuf:"bytes,8,opt,name=parameterName" json:"parameterName,omitempty"` + Serialize bool `protobuf:"varint,9,opt,name=serialize" json:"serialize,omitempty"` +} + +func (m *Field) Reset() { *m = Field{} } +func (m *Field) String() string { return proto.CompactTextString(m) } +func (*Field) ProtoMessage() {} +func (*Field) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Field) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Field) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Field) GetKind() FieldKind { + if m != nil { + return m.Kind + } + return FieldKind_SCALAR +} + +func (m *Field) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *Field) GetPosition() Position { + if m != nil { + return m.Position + } + return Position_BODY +} + +func (m *Field) GetNativeType() string { + if m != nil { + return m.NativeType + } + return "" +} + +func (m *Field) GetFieldName() string { + if m != nil { + return m.FieldName + } + return "" +} + +func (m *Field) GetParameterName() string { + if m != nil { + return m.ParameterName + } + return "" +} + +func (m *Field) GetSerialize() bool { + if m != nil { + return m.Serialize + } + return false +} + +// Type typically corresponds to a definition, parameter, or response +// in an API and is represented by a type in generated code. +type Type struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Kind TypeKind `protobuf:"varint,2,opt,name=kind,enum=surface.v1.TypeKind" json:"kind,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + ContentType string `protobuf:"bytes,4,opt,name=contentType" json:"contentType,omitempty"` + Fields []*Field `protobuf:"bytes,5,rep,name=fields" json:"fields,omitempty"` + TypeName string `protobuf:"bytes,6,opt,name=typeName" json:"typeName,omitempty"` +} + +func (m *Type) Reset() { *m = Type{} } +func (m *Type) String() string { return proto.CompactTextString(m) } +func (*Type) ProtoMessage() {} +func (*Type) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Type) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Type) GetKind() TypeKind { + if m != nil { + return m.Kind + } + return TypeKind_STRUCT +} + +func (m *Type) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Type) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *Type) GetFields() []*Field { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Type) GetTypeName() string { + if m != nil { + return m.TypeName + } + return "" +} + +// Method is an operation of an API and typically has associated client and server code. 
+type Method struct { + Operation string `protobuf:"bytes,1,opt,name=operation" json:"operation,omitempty"` + Path string `protobuf:"bytes,2,opt,name=path" json:"path,omitempty"` + Method string `protobuf:"bytes,3,opt,name=method" json:"method,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description" json:"description,omitempty"` + Name string `protobuf:"bytes,5,opt,name=name" json:"name,omitempty"` + HandlerName string `protobuf:"bytes,6,opt,name=handlerName" json:"handlerName,omitempty"` + ProcessorName string `protobuf:"bytes,7,opt,name=processorName" json:"processorName,omitempty"` + ClientName string `protobuf:"bytes,8,opt,name=clientName" json:"clientName,omitempty"` + ParametersTypeName string `protobuf:"bytes,9,opt,name=parametersTypeName" json:"parametersTypeName,omitempty"` + ResponsesTypeName string `protobuf:"bytes,10,opt,name=responsesTypeName" json:"responsesTypeName,omitempty"` +} + +func (m *Method) Reset() { *m = Method{} } +func (m *Method) String() string { return proto.CompactTextString(m) } +func (*Method) ProtoMessage() {} +func (*Method) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *Method) GetOperation() string { + if m != nil { + return m.Operation + } + return "" +} + +func (m *Method) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *Method) GetMethod() string { + if m != nil { + return m.Method + } + return "" +} + +func (m *Method) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Method) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Method) GetHandlerName() string { + if m != nil { + return m.HandlerName + } + return "" +} + +func (m *Method) GetProcessorName() string { + if m != nil { + return m.ProcessorName + } + return "" +} + +func (m *Method) GetClientName() string { + if m != nil { + return m.ClientName + } + return "" +} + +func (m *Method) GetParametersTypeName() string { + if m != nil { + return m.ParametersTypeName + } + return "" +} + +func (m *Method) GetResponsesTypeName() string { + if m != nil { + return m.ResponsesTypeName + } + return "" +} + +// Model represents an API for code generation. 
+type Model struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Types []*Type `protobuf:"bytes,2,rep,name=types" json:"types,omitempty"` + Methods []*Method `protobuf:"bytes,3,rep,name=methods" json:"methods,omitempty"` +} + +func (m *Model) Reset() { *m = Model{} } +func (m *Model) String() string { return proto.CompactTextString(m) } +func (*Model) ProtoMessage() {} +func (*Model) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *Model) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Model) GetTypes() []*Type { + if m != nil { + return m.Types + } + return nil +} + +func (m *Model) GetMethods() []*Method { + if m != nil { + return m.Methods + } + return nil +} + +func init() { + proto.RegisterType((*Field)(nil), "surface.v1.Field") + proto.RegisterType((*Type)(nil), "surface.v1.Type") + proto.RegisterType((*Method)(nil), "surface.v1.Method") + proto.RegisterType((*Model)(nil), "surface.v1.Model") + proto.RegisterEnum("surface.v1.FieldKind", FieldKind_name, FieldKind_value) + proto.RegisterEnum("surface.v1.TypeKind", TypeKind_name, TypeKind_value) + proto.RegisterEnum("surface.v1.Position", Position_name, Position_value) +} + +func init() { proto.RegisterFile("surface.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 573 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x54, 0x5f, 0x6f, 0xd3, 0x3e, + 0x14, 0x5d, 0xfe, 0x36, 0xb9, 0xfd, 0xf5, 0xa7, 0xcc, 0x02, 0x14, 0x21, 0x84, 0xa2, 0x0a, 0xa1, + 0xae, 0x9a, 0x2a, 0x18, 0x6f, 0xbc, 0x65, 0x6d, 0xaa, 0x09, 0xe8, 0x5a, 0x4c, 0xf6, 0xd0, 0xc7, + 0xd0, 0xb8, 0x6a, 0x44, 0x1b, 0x87, 0x38, 0x4c, 0x82, 0x0f, 0xc4, 0xc7, 0x81, 0xaf, 0x84, 0x7c, + 0x93, 0xb4, 0xde, 0xda, 0x37, 0xfb, 0xdc, 0xe3, 0x6b, 0x9f, 0x73, 0x6e, 0x02, 0x3d, 0xf1, 0xa3, + 0x5c, 0x27, 0x2b, 0x36, 0x2a, 0x4a, 0x5e, 0x71, 0x02, 0xed, 0xf6, 0xfe, 0x6d, 0xff, 0xb7, 0x0e, + 0xd6, 0x34, 0x63, 0xdb, 0x94, 0x10, 0x30, 0xf3, 0x64, 0xc7, 0x7c, 0x2d, 0xd0, 0x06, 0x2e, 0xc5, + 0xb5, 0xc4, 0xaa, 0x9f, 0x05, 0xf3, 0xf5, 0x1a, 0x93, 0x6b, 0x72, 0x01, 0xe6, 0xb7, 0x2c, 0x4f, + 0x7d, 0x23, 0xd0, 0x06, 0xff, 0x5f, 0x3d, 0x1d, 0x1d, 0x9a, 0x8d, 0xb0, 0xd1, 0xc7, 0x2c, 0x4f, + 0x29, 0x52, 0xc8, 0x33, 0xb0, 0xd7, 0xbc, 0xdc, 0x25, 0x95, 0x6f, 0x62, 0x83, 0x66, 0x47, 0xde, + 0x80, 0x53, 0x70, 0x91, 0x55, 0x19, 0xcf, 0x7d, 0x0b, 0xdb, 0x3c, 0x51, 0xdb, 0x2c, 0x9a, 0x1a, + 0xdd, 0xb3, 0xc8, 0x4b, 0x80, 0x3c, 0xa9, 0xb2, 0x7b, 0x16, 0xcb, 0xe7, 0xd8, 0xd8, 0x4d, 0x41, + 0xc8, 0x0b, 0x70, 0xd7, 0xf2, 0xf2, 0x5b, 0xa9, 0xa0, 0x83, 0xe5, 0x03, 0x40, 0x5e, 0x41, 0xaf, + 0x48, 0xca, 0x64, 0xc7, 0x2a, 0x56, 0x22, 0xc3, 0x41, 0xc6, 0x43, 0x50, 0xf6, 0x10, 0xac, 0xcc, + 0x92, 0x6d, 0xf6, 0x8b, 0xf9, 0x6e, 0xa0, 0x0d, 0x1c, 0x7a, 0x00, 0xfa, 0x7f, 0x35, 0x30, 0xf1, + 0xaa, 0x53, 0x3e, 0x0d, 0x1a, 0x4f, 0xf4, 0x63, 0x31, 0xf2, 0x8c, 0x62, 0x49, 0x00, 0xdd, 0x94, + 0x89, 0x55, 0x99, 0x15, 0xa8, 0xde, 0xc0, 0x26, 0x2a, 0x24, 0x19, 0x2b, 0x9e, 0x57, 0x2c, 0xaf, + 0x50, 0x6b, 0xed, 0x9c, 0x0a, 0x91, 0x0b, 0xb0, 0x51, 0x9b, 0xf0, 0xad, 0xc0, 0x18, 0x74, 0xaf, + 0xce, 0x8f, 0x32, 0xa0, 0x0d, 0x81, 0x3c, 0x07, 0x47, 0x86, 0x86, 0xa2, 0x6b, 0xd7, 0xf6, 0xfb, + 0xfe, 0x1f, 0x1d, 0xec, 0x19, 0xab, 0x36, 0x3c, 0x95, 0xd2, 0x79, 0xc1, 0xca, 0x04, 0xdf, 0x54, + 0x0b, 0x3b, 0x00, 0x52, 0x71, 0x91, 0x54, 0x9b, 0x76, 0x0a, 0xe4, 0x5a, 0x46, 0xbb, 0xc3, 0xb3, + 0x8d, 0x84, 0x66, 0xf7, 0x58, 0x9f, 0x79, 0xac, 0xaf, 0xf5, 0xcf, 0x52, 0xfc, 0x0b, 0xa0, 0xbb, + 0x49, 
0xf2, 0x74, 0xdb, 0xc4, 0x53, 0xbf, 0x54, 0x85, 0x30, 0xc2, 0x92, 0xaf, 0x98, 0x10, 0xbc, + 0x54, 0x42, 0x7e, 0x08, 0xca, 0x31, 0x59, 0x6d, 0x33, 0x96, 0x57, 0x4a, 0xca, 0x0a, 0x42, 0x46, + 0x40, 0xf6, 0x99, 0x8b, 0xb8, 0x35, 0xc6, 0x45, 0xde, 0x89, 0x0a, 0xb9, 0x84, 0xf3, 0x92, 0x89, + 0x82, 0xe7, 0x82, 0x1d, 0xe8, 0x80, 0xf4, 0xe3, 0x42, 0xff, 0x3b, 0x58, 0x33, 0x9e, 0xb2, 0xed, + 0xc9, 0x11, 0x79, 0x0d, 0x96, 0x74, 0x5e, 0xf8, 0x3a, 0x66, 0xe6, 0x3d, 0x9e, 0x11, 0x5a, 0x97, + 0xc9, 0x25, 0x74, 0x6a, 0x2b, 0x85, 0x6f, 0x20, 0x93, 0xa8, 0xcc, 0x3a, 0x2f, 0xda, 0x52, 0x86, + 0xef, 0xc1, 0xdd, 0x7f, 0x74, 0x04, 0xc0, 0xfe, 0x32, 0x0e, 0x3f, 0x85, 0xd4, 0x3b, 0x23, 0x1d, + 0x30, 0x66, 0xe1, 0xc2, 0xd3, 0x88, 0x0b, 0x56, 0x48, 0x69, 0xb8, 0xf4, 0x74, 0xd2, 0x03, 0x97, + 0x46, 0xd3, 0x88, 0x46, 0xb7, 0xe3, 0xc8, 0x33, 0x86, 0x7d, 0x70, 0xda, 0xe1, 0xc4, 0xa3, 0x31, + 0xbd, 0x1b, 0xc7, 0xde, 0x99, 0x5c, 0xcf, 0xaf, 0x3f, 0x44, 0xe3, 0xd8, 0xd3, 0x86, 0x63, 0x70, + 0xda, 0xaf, 0x91, 0x38, 0x60, 0x5e, 0xcf, 0x27, 0xcb, 0x9a, 0x71, 0x13, 0x85, 0x93, 0x88, 0x7a, + 0x1a, 0xf9, 0x0f, 0x9c, 0xe9, 0x9c, 0xce, 0x26, 0x61, 0x1c, 0x7a, 0xba, 0xbc, 0xed, 0xf3, 0x5d, + 0x44, 0x97, 0x9e, 0x21, 0xe9, 0x8b, 0x30, 0xbe, 0xf1, 0xcc, 0xaf, 0x36, 0xfe, 0x76, 0xde, 0xfd, + 0x0b, 0x00, 0x00, 0xff, 0xff, 0x15, 0x52, 0x6a, 0x89, 0x87, 0x04, 0x00, 0x00, +} diff --git a/vendor/github.com/googleapis/gnostic/surface/surface.proto b/vendor/github.com/googleapis/gnostic/surface/surface.proto new file mode 100644 index 000000000..3b548a66d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/surface/surface.proto @@ -0,0 +1,90 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Model an API surface for code generation. + +syntax = "proto3"; + +package surface.v1; + +enum FieldKind { + SCALAR = 0; + MAP = 1; + ARRAY = 2; + REFERENCE = 3; +} + +enum TypeKind { + STRUCT = 0; // implement with named fields + OBJECT = 1; // implement with a map +} + +enum Position { + BODY = 0; + HEADER = 1; + FORMDATA = 2; + QUERY = 3; + PATH = 4; +} + +// Field is a field in a definition and can be associated with +// a position in a request structure. +message Field { + string name = 1; // the name as specified in the API description + string type = 2; // the specified content type of the field + FieldKind kind = 3; // what kind of thing is this field? 
scalar, reference, array, map of strings to the specified type + string format = 4; // the specified format of the field + Position position = 5; // "body", "header", "formdata", "query", or "path" + + string nativeType = 6; // the programming-language native type of the field + string fieldName = 7; // the name to use for a data structure field + string parameterName = 8; // the name to use for a function parameter + + bool serialize = 9; // true if this field should be serialized (to JSON, etc) +} + +// Type typically corresponds to a definition, parameter, or response +// in an API and is represented by a type in generated code. +message Type { + string name = 1; // the name to use for the type + TypeKind kind = 2; // a meta-description of the type (struct, map, etc) + string description = 3; // a comment describing the type + string contentType = 4; // if the type is a map, this is its content type + repeated Field fields = 5; // the fields of the type + + string typeName = 6; // language-specific type name +} + +// Method is an operation of an API and typically has associated client and server code. +message Method { + string operation = 1; // Operation ID + string path = 2; // HTTP path + string method = 3; // HTTP method name + string description = 4; // description of method + + string name = 5; // Operation name, possibly generated from method and path + string handlerName = 6; // name of the generated handler + string processorName = 7; // name of the processing function in the service interface + string clientName = 8; // name of client + + string parametersTypeName = 9; // parameters (input), with fields corresponding to input parameters + string responsesTypeName = 10; // responses (output), with fields corresponding to possible response values +} + +// Model represents an API for code generation. +message Model { + string name = 1; // a free-form title for the API + repeated Type types = 2; // the types used by the API + repeated Method methods = 3; // the methods (functions) of the API +} diff --git a/vendor/github.com/googleapis/gnostic/surface/type.go b/vendor/github.com/googleapis/gnostic/surface/type.go new file mode 100644 index 000000000..f2beef1f9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/surface/type.go @@ -0,0 +1,51 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package surface_v1 + +func (t *Type) addField(f *Field) { + t.Fields = append(t.Fields, f) +} + +func (s *Type) HasFieldWithName(name string) bool { + return s.FieldWithName(name) != nil +} + +func (s *Type) FieldWithName(name string) *Field { + if s == nil || s.Fields == nil || name == "" { + return nil + } + for _, f := range s.Fields { + if f.FieldName == name { + return f + } + } + return nil +} + +func (s *Type) HasFieldWithPosition(position Position) bool { + return s.FieldWithPosition(position) != nil +} + +func (s *Type) FieldWithPosition(position Position) *Field { + if s == nil || s.Fields == nil { + return nil + } + for _, f := range s.Fields { + if f.Position == position { + return f + } + } + return nil +} diff --git a/vendor/github.com/googleapis/gnostic/test/README.md b/vendor/github.com/googleapis/gnostic/test/README.md new file mode 100644 index 000000000..236f378ab --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/README.md @@ -0,0 +1,3 @@ +# test + +This directory contains test data used to verify gnostic. diff --git a/vendor/github.com/googleapis/gnostic/test/errors/invalid-plugin-invocation.errors b/vendor/github.com/googleapis/gnostic/test/errors/invalid-plugin-invocation.errors new file mode 100644 index 000000000..685664aef --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/errors/invalid-plugin-invocation.errors @@ -0,0 +1,12 @@ +Errors reading examples/v2.0/yaml/petstore.yaml +Invalid invocation of gnostic-plugin: foo=bar,:abc +Errors reading examples/v2.0/yaml/petstore.yaml +Invalid invocation of gnostic-plugin: ,foo=bar:abc +Errors reading examples/v2.0/yaml/petstore.yaml +Invalid invocation of gnostic-plugin: foo=:abc +Errors reading examples/v2.0/yaml/petstore.yaml +Invalid invocation of gnostic-plugin: =bar:abc +Errors reading examples/v2.0/yaml/petstore.yaml +Invalid invocation of gnostic-plugin: ,,:abc +Errors reading examples/v2.0/yaml/petstore.yaml +Invalid invocation of gnostic-plugin: foo=bar=baz:abc diff --git a/vendor/github.com/googleapis/gnostic/test/errors/petstore-badproperties.errors b/vendor/github.com/googleapis/gnostic/test/errors/petstore-badproperties.errors new file mode 100644 index 000000000..11b8293e9 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/errors/petstore-badproperties.errors @@ -0,0 +1,16 @@ +Errors reading examples/errors/petstore-badproperties.yaml +ERROR $root.info is missing required property: version +ERROR $root.info has invalid property: myproperty +ERROR $root.paths./pets.get.parameters.parameter.bodyParameter is missing required property: schema +ERROR $root.paths./pets.get.parameters.parameter.bodyParameter has invalid properties: type, format, myproperty +ERROR $root.paths./pets.get.parameters.parameter.bodyParameter has unexpected value for in: query (string) +ERROR $root.paths./pets.get.parameters.parameter.nonBodyParameter.headerParameterSubSchema has invalid property: myproperty +ERROR $root.paths./pets.get.parameters.parameter.nonBodyParameter.headerParameterSubSchema has unexpected value for in: query (string) +ERROR $root.paths./pets.get.parameters.parameter.nonBodyParameter.formDataParameterSubSchema has invalid property: myproperty +ERROR $root.paths./pets.get.parameters.parameter.nonBodyParameter.formDataParameterSubSchema has unexpected value for in: query (string) +ERROR $root.paths./pets.get.parameters.parameter.nonBodyParameter.queryParameterSubSchema has invalid property: myproperty +ERROR 
$root.paths./pets.get.parameters.parameter.nonBodyParameter.pathParameterSubSchema has invalid property: myproperty +ERROR $root.paths./pets.get.parameters.parameter.nonBodyParameter.pathParameterSubSchema has unexpected value for in: query (string) +ERROR $root.paths./pets.get.parameters.jsonReference is missing required property: $ref +ERROR $root.paths./pets.get.parameters.jsonReference has invalid properties: name, in, required, type, format, myproperty +ERROR $root.paths./pets.post has unexpected value for tags: pets (string) \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/test/errors/petstore-missingversion.errors b/vendor/github.com/googleapis/gnostic/test/errors/petstore-missingversion.errors new file mode 100644 index 000000000..c61b034bb --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/errors/petstore-missingversion.errors @@ -0,0 +1,2 @@ +Errors reading examples/errors/petstore-missingversion.yaml +unable to identify OpenAPI version \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/test/errors/petstore-unresolvedrefs.errors b/vendor/github.com/googleapis/gnostic/test/errors/petstore-unresolvedrefs.errors new file mode 100644 index 000000000..dd8035b46 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/errors/petstore-unresolvedrefs.errors @@ -0,0 +1,3 @@ +Errors reading examples/errors/petstore-unresolvedrefs.yaml +ERROR could not resolve #/definitions/Pet +ERROR could not resolve #/definitions/Error \ No newline at end of file diff --git a/vendor/github.com/googleapis/gnostic/test/library-example-with-ext.json b/vendor/github.com/googleapis/gnostic/test/library-example-with-ext.json new file mode 100644 index 000000000..e6b7f2208 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/library-example-with-ext.json @@ -0,0 +1,96 @@ +{ + "swagger": "2.0", + "info": { + "title": "Google Example Library API", + "description": "A simple Google Example Library API.", + "version": "v1" + }, + "host": "library-example.googleapis.com", + + "x-sampleone-book" : {"code": 123, "message": 999}, + "x-sampleone-shelf" : {"foo1": 123, "bar": 999}, + + "x-sampleone-mysimplestring" : "hello world", + "x-sampleone-mysimpleint64" : 12345, + "x-sampleone-mysimplenumber" : 111.222, + "x-sampleone-mysimpleboolean" : true, + + "x-sampletwo-book" : {"code": 123, "message": 999}, + "x-sampletwo-shelf" : {"foo1": 123, "bar": 999}, + + "x-unhandled" : {"code": 123, "message": 999}, + + "parameters" : { + "paramAtSwaggerScope": { + "name": "paramAtSwaggerScope", + "in": "query", + "description": "test", + "required": true, + "type": "integer", + "format": "int32" + } + }, + "paths": { + "/v1/{sharedParameter}/pets": { + "post": { + "operationId": "simpleMethod", + "parameters": [ + { + "name": "myBodyRef", + "in": "body", + "schema": { + "properties": { + "myStringA": { + "type": "string" + } + } + } + }, + { + "name": "paramToOverride", + "in": "query", + "description": "New description", + "required": true, + "type": "string" + }, + { + "$ref": "#/parameters/paramAtSwaggerScope" + } + ], + "responses": { + "default": { + "description": "successful operation", + "schema": { + "$ref": "#/definitions/SimpleDef" + } + } + } + }, + "parameters": [ + { + "name": "sharedParameter", + "in": "path", + "description": "Shared parameter", + "required": true, + "type": "string" + }, + { + "name": "paramToOverride", + "in": "query", + "description": "Description to override", + "required": true, + "type": "string" + } + ] + } + }, 
+ "definitions": { + "SimpleDef": { + "properties": { + "myStringA": { + "type": "string" + } + } + } + } +} diff --git a/vendor/github.com/googleapis/gnostic/test/library-example-with-ext.text.out b/vendor/github.com/googleapis/gnostic/test/library-example-with-ext.text.out new file mode 100644 index 000000000..b12e8f081 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/library-example-with-ext.text.out @@ -0,0 +1,234 @@ +swagger: "2.0" +info: < + title: "Google Example Library API" + version: "v1" + description: "A simple Google Example Library API." +> +host: "library-example.googleapis.com" +paths: < + path: < + name: "/v1/{sharedParameter}/pets" + value: < + post: < + operation_id: "simpleMethod" + parameters: < + parameter: < + body_parameter: < + name: "myBodyRef" + in: "body" + schema: < + properties: < + additional_properties: < + name: "myStringA" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + parameters: < + parameter: < + non_body_parameter: < + query_parameter_sub_schema: < + required: true + in: "query" + description: "New description" + name: "paramToOverride" + type: "string" + > + > + > + > + parameters: < + parameter: < + non_body_parameter: < + query_parameter_sub_schema: < + required: true + in: "query" + description: "test" + name: "paramAtSwaggerScope" + type: "integer" + format: "int32" + > + > + > + > + responses: < + response_code: < + name: "default" + value: < + response: < + description: "successful operation" + schema: < + schema: < + properties: < + additional_properties: < + name: "myStringA" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + parameters: < + parameter: < + non_body_parameter: < + path_parameter_sub_schema: < + required: true + in: "path" + description: "Shared parameter" + name: "sharedParameter" + type: "string" + > + > + > + > + parameters: < + parameter: < + non_body_parameter: < + query_parameter_sub_schema: < + required: true + in: "query" + description: "Description to override" + name: "paramToOverride" + type: "string" + > + > + > + > + > + > +> +definitions: < + additional_properties: < + name: "SimpleDef" + value: < + properties: < + additional_properties: < + name: "myStringA" + value: < + type: < + value: "string" + > + > + > + > + > + > +> +parameters: < + additional_properties: < + name: "paramAtSwaggerScope" + value: < + non_body_parameter: < + query_parameter_sub_schema: < + required: true + in: "query" + description: "test" + name: "paramAtSwaggerScope" + type: "integer" + format: "int32" + > + > + > + > +> +vendor_extension: < + name: "x-sampleone-book" + value: < + value: < + type_url: "type.googleapis.com/sampleone.Book" + value: "\010{\020\347\007" + > + yaml: "code: 123\nmessage: 999\n" + > +> +vendor_extension: < + name: "x-sampleone-shelf" + value: < + value: < + type_url: "type.googleapis.com/sampleone.Shelf" + value: "\010{\020\347\007" + > + yaml: "foo1: 123\nbar: 999\n" + > +> +vendor_extension: < + name: "x-sampleone-mysimplestring" + value: < + value: < + type_url: "type.googleapis.com/google.protobuf.StringValue" + value: "\n\013hello world" + > + yaml: "hello world\n" + > +> +vendor_extension: < + name: "x-sampleone-mysimpleint64" + value: < + value: < + type_url: "type.googleapis.com/google.protobuf.StringValue" + value: "\n\00512345" + > + yaml: "12345\n" + > +> +vendor_extension: < + name: "x-sampleone-mysimplenumber" + value: < + value: < + type_url: "type.googleapis.com/google.protobuf.DoubleValue" + value: "\t\221\355|?5\316[@" + > + 
yaml: "111.222\n" + > +> +vendor_extension: < + name: "x-sampleone-mysimpleboolean" + value: < + value: < + type_url: "type.googleapis.com/google.protobuf.BoolValue" + value: "\010\001" + > + yaml: "true\n" + > +> +vendor_extension: < + name: "x-sampletwo-book" + value: < + value: < + type_url: "type.googleapis.com/sampletwo.Book" + value: "\010{\020\347\007" + > + yaml: "code: 123\nmessage: 999\n" + > +> +vendor_extension: < + name: "x-sampletwo-shelf" + value: < + value: < + type_url: "type.googleapis.com/sampletwo.Shelf" + value: "\010{\020\347\007" + > + yaml: "foo1: 123\nbar: 999\n" + > +> +vendor_extension: < + name: "x-unhandled" + value: < + yaml: "code: 123\nmessage: 999\n" + > +> diff --git a/vendor/github.com/googleapis/gnostic/test/v2.0/petstore.text b/vendor/github.com/googleapis/gnostic/test/v2.0/petstore.text new file mode 100644 index 000000000..095e2fd0d --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/v2.0/petstore.text @@ -0,0 +1,387 @@ +swagger: "2.0" +info: < + title: "Swagger Petstore" + version: "1.0.0" + license: < + name: "MIT" + > +> +host: "petstore.swagger.io" +base_path: "/v1" +schemes: "http" +consumes: "application/json" +produces: "application/json" +paths: < + path: < + name: "/pets" + value: < + get: < + tags: "pets" + summary: "List all pets" + operation_id: "listPets" + parameters: < + parameter: < + non_body_parameter: < + query_parameter_sub_schema: < + in: "query" + description: "How many items to return at one time (max 100)" + name: "limit" + type: "integer" + format: "int32" + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "An paged array of pets" + schema: < + schema: < + type: < + value: "array" + > + items: < + schema: < + required: "id" + required: "name" + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + headers: < + additional_properties: < + name: "x-next" + value: < + type: "string" + description: "A link to the next page of responses" + > + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + post: < + tags: "pets" + summary: "Create a pet" + operation_id: "createPets" + responses: < + response_code: < + name: "201" + value: < + response: < + description: "Null response" + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + > + > + path: < + name: "/pets/{petId}" + value: < + get: < + tags: "pets" + summary: "Info for a specific pet" + operation_id: "showPetById" + parameters: < + parameter: < + non_body_parameter: < + path_parameter_sub_schema: < + 
required: true + in: "path" + description: "The id of the pet to retrieve" + name: "petId" + type: "string" + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "Expected response to a valid request" + schema: < + schema: < + type: < + value: "array" + > + items: < + schema: < + required: "id" + required: "name" + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + > + > +> +definitions: < + additional_properties: < + name: "Pet" + value: < + required: "id" + required: "name" + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + additional_properties: < + name: "Pets" + value: < + type: < + value: "array" + > + items: < + schema: < + required: "id" + required: "name" + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + additional_properties: < + name: "Error" + value: < + required: "code" + required: "message" + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > +> diff --git a/vendor/github.com/googleapis/gnostic/test/v2.0/yaml/petstore-separate/spec/swagger.text b/vendor/github.com/googleapis/gnostic/test/v2.0/yaml/petstore-separate/spec/swagger.text new file mode 100644 index 000000000..1685891bd --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/v2.0/yaml/petstore-separate/spec/swagger.text @@ -0,0 +1,468 @@ +swagger: "2.0" +info: < + title: "Swagger Petstore" + version: "1.0.0" + description: "A sample API that uses a petstore as an example to demonstrate features in the swagger-2.0 specification" + terms_of_service: "http://helloreverb.com/terms/" + contact: < + name: "Wordnik API Team" + url: "http://madskristensen.net" + email: "foo@example.com" + > + license: < + name: "MIT" + url: "http://github.com/gruntjs/grunt/blob/master/LICENSE-MIT" + > +> +host: "petstore.swagger.wordnik.com" +base_path: "/api" +schemes: "http" +consumes: "application/json" +produces: "application/json" +paths: < + path: < + name: "/pets" + value: < + get: < + description: "Returns all pets from the system that the user has access to\nNam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. 
Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam. Cras quis velit non tortor eleifend sagittis. Praesent at enim pharetra urna volutpat venenatis eget eget mauris. In eleifend fermentum facilisis. Praesent enim enim, gravida ac sodales sed, placerat id erat. Suspendisse lacus dolor, consectetur non augue vel, vehicula interdum libero. Morbi euismod sagittis libero sed lacinia.\n\nSed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellentesque posuere elementum. Sed a varius odio. Morbi rhoncus ligula libero, vel eleifend nunc tristique vitae. Fusce et sem dui. Aenean nec scelerisque tortor. Fusce malesuada accumsan magna vel tempus. Quisque mollis felis eu dolor tristique, sit amet auctor felis gravida. Sed libero lorem, molestie sed nisl in, accumsan tempor nisi. Fusce sollicitudin massa ut lacinia mattis. Sed vel eleifend lorem. Pellentesque vitae felis pretium, pulvinar elit eu, euismod sapien.\n" + operation_id: "findPets" + parameters: < + parameter: < + non_body_parameter: < + query_parameter_sub_schema: < + in: "query" + description: "tags to filter by" + name: "tags" + type: "array" + items: < + type: "string" + > + collection_format: "csv" + > + > + > + > + parameters: < + parameter: < + non_body_parameter: < + query_parameter_sub_schema: < + in: "query" + description: "maximum number of results to return" + name: "limit" + type: "integer" + format: "int32" + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "pet response" + schema: < + schema: < + type: < + value: "array" + > + items: < + schema: < + required: "id" + required: "name" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + post: < + description: "Creates a new pet in the store. 
Duplicates are allowed" + operation_id: "addPet" + parameters: < + parameter: < + body_parameter: < + description: "Pet to add to the store" + name: "pet" + in: "body" + required: true + schema: < + type: < + value: "object" + > + all_of: < + required: "id" + required: "name" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + all_of: < + required: "name" + properties: < + additional_properties: < + name: "description" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + > + > + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "pet response" + schema: < + schema: < + required: "id" + required: "name" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + > + > + path: < + name: "/pets/{id}" + value: < + get: < + description: "Returns a user based on a single ID, if the user does not have access to the pet" + operation_id: "find pet by id" + parameters: < + parameter: < + non_body_parameter: < + path_parameter_sub_schema: < + required: true + in: "path" + description: "ID of pet to fetch" + name: "id" + type: "integer" + format: "int64" + > + > + > + > + responses: < + response_code: < + name: "200" + value: < + response: < + description: "pet response" + schema: < + schema: < + required: "id" + required: "name" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "id" + value: < + format: "int64" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "name" + value: < + type: < + value: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + delete: < + description: "deletes a single pet based on the ID supplied" + operation_id: "deletePet" + parameters: < + parameter: < + non_body_parameter: < + path_parameter_sub_schema: < + required: true + in: "path" + description: "ID of pet to delete" + name: "id" + type: "integer" + format: "int64" + > + > + > + > + responses: < + response_code: < + name: "204" + 
value: < + response: < + description: "pet deleted" + > + > + > + response_code: < + name: "default" + value: < + response: < + description: "unexpected error" + schema: < + schema: < + required: "code" + required: "message" + type: < + value: "object" + > + properties: < + additional_properties: < + name: "code" + value: < + format: "int32" + type: < + value: "integer" + > + > + > + additional_properties: < + name: "message" + value: < + type: < + value: "string" + > + > + > + > + > + > + > + > + > + > + > + > + > +> diff --git a/vendor/github.com/googleapis/gnostic/test/v2.0/yaml/sample-petstore.out b/vendor/github.com/googleapis/gnostic/test/v2.0/yaml/sample-petstore.out new file mode 100644 index 000000000..38406d036 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/v2.0/yaml/sample-petstore.out @@ -0,0 +1,13 @@ + + +summary.txt -------------------- +Swagger: 2.0 +Host: petstore.swagger.io +BasePath: /v1 +Info: + Title: Swagger Petstore + Version: 1.0.0 +Paths: + GET /pets + POST /pets + GET /pets/{petId} diff --git a/vendor/github.com/googleapis/gnostic/test/v3.0/petstore.text b/vendor/github.com/googleapis/gnostic/test/v3.0/petstore.text new file mode 100644 index 000000000..14aa3950a --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/test/v3.0/petstore.text @@ -0,0 +1,268 @@ +openapi: "3.0" +info: < + title: "OpenAPI Petstore" + license: < + name: "MIT" + > + version: "1.0.0" +> +servers: < + url: "https://petstore.openapis.org/v1" + description: "Development server" +> +paths: < + path: < + name: "/pets" + value: < + get: < + tags: "pets" + summary: "List all pets" + operation_id: "listPets" + parameters: < + parameter: < + name: "limit" + in: "query" + description: "How many items to return at one time (max 100)" + schema: < + schema: < + type: "integer" + format: "int32" + > + > + > + > + responses: < + default: < + response: < + description: "unexpected error" + content: < + additional_properties: < + name: "application/json" + value: < + schema: < + reference: < + _ref: "#/components/schemas/Error" + > + > + > + > + > + > + > + response_or_reference: < + name: "200" + value: < + response: < + description: "An paged array of pets" + headers: < + additional_properties: < + name: "x-next" + value: < + header: < + description: "A link to the next page of responses" + schema: < + schema: < + type: "string" + > + > + > + > + > + > + content: < + additional_properties: < + name: "application/json" + value: < + schema: < + reference: < + _ref: "#/components/schemas/Pets" + > + > + > + > + > + > + > + > + > + > + post: < + tags: "pets" + summary: "Create a pet" + operation_id: "createPets" + responses: < + default: < + response: < + description: "unexpected error" + content: < + additional_properties: < + name: "application/json" + value: < + schema: < + reference: < + _ref: "#/components/schemas/Error" + > + > + > + > + > + > + > + response_or_reference: < + name: "201" + value: < + response: < + description: "Null response" + > + > + > + > + > + > + > + path: < + name: "/pets/{petId}" + value: < + get: < + tags: "pets" + summary: "Info for a specific pet" + operation_id: "showPetById" + parameters: < + parameter: < + name: "petId" + in: "path" + description: "The id of the pet to retrieve" + required: true + schema: < + schema: < + type: "string" + > + > + > + > + responses: < + default: < + response: < + description: "unexpected error" + content: < + additional_properties: < + name: "application/json" + value: < + schema: < + reference: < + _ref: 
"#/components/schemas/Error" + > + > + > + > + > + > + > + response_or_reference: < + name: "200" + value: < + response: < + description: "Expected response to a valid request" + content: < + additional_properties: < + name: "application/json" + value: < + schema: < + reference: < + _ref: "#/components/schemas/Pets" + > + > + > + > + > + > + > + > + > + > + > + > +> +components: < + schemas: < + additional_properties: < + name: "Pet" + value: < + schema: < + required: "id" + required: "name" + properties: < + additional_properties: < + name: "id" + value: < + schema: < + type: "integer" + format: "int64" + > + > + > + additional_properties: < + name: "name" + value: < + schema: < + type: "string" + > + > + > + additional_properties: < + name: "tag" + value: < + schema: < + type: "string" + > + > + > + > + > + > + > + additional_properties: < + name: "Pets" + value: < + schema: < + type: "array" + items: < + schema_or_reference: < + reference: < + _ref: "#/components/schemas/Pet" + > + > + > + > + > + > + additional_properties: < + name: "Error" + value: < + schema: < + required: "code" + required: "message" + properties: < + additional_properties: < + name: "code" + value: < + schema: < + type: "integer" + format: "int32" + > + > + > + additional_properties: < + name: "message" + value: < + schema: < + type: "string" + > + > + > + > + > + > + > + > +> diff --git a/vendor/github.com/googleapis/gnostic/tools/README.md b/vendor/github.com/googleapis/gnostic/tools/README.md new file mode 100644 index 000000000..f2ba116ff --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/tools/README.md @@ -0,0 +1,11 @@ +# tools + +This directory contains general utilities used by Gnostic and related programs. + +## j2y2j + +Converts JSON to YAML and YAML to JSON. + +## format-schema + +Formats a JSON schema canonically. diff --git a/vendor/github.com/googleapis/gnostic/tools/format-schema/main.go b/vendor/github.com/googleapis/gnostic/tools/format-schema/main.go new file mode 100644 index 000000000..85edbd79e --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/tools/format-schema/main.go @@ -0,0 +1,37 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// format-schema canonically formats a JSON schema. 
+package main + +import ( + "fmt" + "github.com/googleapis/gnostic/jsonschema" + "os" + "path" +) + +func main() { + if len(os.Args) != 2 { + fmt.Printf("Usage: %s [filename]\n", path.Base(os.Args[0])) + fmt.Printf("where [filename] is a path to a JSON schema to format.\n") + os.Exit(0) + } + schema, err := jsonschema.NewSchemaFromFile(os.Args[1]) + if err != nil { + panic(err) + } + output := schema.JSONString() + fmt.Printf("%s\n", output) +} diff --git a/vendor/github.com/googleapis/gnostic/tools/j2y2j/main.go b/vendor/github.com/googleapis/gnostic/tools/j2y2j/main.go new file mode 100644 index 000000000..d2fb04294 --- /dev/null +++ b/vendor/github.com/googleapis/gnostic/tools/j2y2j/main.go @@ -0,0 +1,60 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// j2y2j converts JSON to YAML and YAML to JSON. +package main + +import ( + "fmt" + "github.com/googleapis/gnostic/jsonschema" + "gopkg.in/yaml.v2" + "io/ioutil" + "os" + "path" +) + +func usage() { + fmt.Printf("Usage: %s [filename] [--json] [--yaml]\n", path.Base(os.Args[0])) + fmt.Printf("where [filename] is a path to a JSON or YAML file to convert\n") + fmt.Printf("and --json or --yaml indicates conversion to the corresponding format.\n") + os.Exit(0) +} + +func main() { + if len(os.Args) != 3 { + usage() + } + + filename := os.Args[1] + file, err := ioutil.ReadFile(filename) + if err != nil { + panic(err) + } + var info yaml.MapSlice + err = yaml.Unmarshal(file, &info) + + switch os.Args[2] { + case "--json": + result := jsonschema.Render(info) + fmt.Printf("%s", result) + case "--yaml": + result, err := yaml.Marshal(info) + if err != nil { + panic(err) + } + fmt.Printf("%s", string(result)) + default: + usage() + } +} diff --git a/vendor/github.com/gregjones/httpcache/.travis.yml b/vendor/github.com/gregjones/httpcache/.travis.yml new file mode 100644 index 000000000..b5ffbe03d --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/.travis.yml @@ -0,0 +1,19 @@ +sudo: false +language: go +go: + - 1.6.x + - 1.7.x + - 1.8.x + - 1.9.x + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d .) + - go tool vet . + - go test -v -race ./... 
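(Editor's note, not part of the vendored tree.) The j2y2j tool above decodes into `yaml.MapSlice` rather than `map[string]interface{}` so that key order survives the JSON/YAML round trip. A minimal sketch of that behaviour, assuming only `gopkg.in/yaml.v2` and a made-up sample document:

```go
package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"
)

func main() {
	// Input document whose key order we want to preserve.
	src := []byte("swagger: \"2.0\"\ninfo:\n  title: Petstore\n  version: \"1.0.0\"\nbasePath: /v1\n")

	// yaml.MapSlice is an ordered list of key/value pairs, so re-marshaling
	// keeps the original key order; decoding into a plain map would not.
	var doc yaml.MapSlice
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}

	out, err := yaml.Marshal(doc)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s", out)
}
```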
diff --git a/vendor/github.com/gregjones/httpcache/LICENSE.txt b/vendor/github.com/gregjones/httpcache/LICENSE.txt new file mode 100644 index 000000000..81316beb0 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/LICENSE.txt @@ -0,0 +1,7 @@ +Copyright © 2012 Greg Jones (greg.jones@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/gregjones/httpcache/README.md b/vendor/github.com/gregjones/httpcache/README.md new file mode 100644 index 000000000..eb2eae8ed --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/README.md @@ -0,0 +1,25 @@ +httpcache +========= + +[![Build Status](https://travis-ci.org/gregjones/httpcache.svg?branch=master)](https://travis-ci.org/gregjones/httpcache) [![GoDoc](https://godoc.org/github.com/gregjones/httpcache?status.svg)](https://godoc.org/github.com/gregjones/httpcache) + +Package httpcache provides a http.RoundTripper implementation that works as a mostly RFC-compliant cache for http responses. + +It is only suitable for use as a 'private' cache (i.e. for a web-browser or an API-client and not for a shared proxy). + +Cache Backends +-------------- + +- The built-in 'memory' cache stores responses in an in-memory map. +- [`github.com/gregjones/httpcache/diskcache`](https://github.com/gregjones/httpcache/tree/master/diskcache) provides a filesystem-backed cache using the [diskv](https://github.com/peterbourgon/diskv) library. +- [`github.com/gregjones/httpcache/memcache`](https://github.com/gregjones/httpcache/tree/master/memcache) provides memcache implementations, for both App Engine and 'normal' memcache servers. +- [`sourcegraph.com/sourcegraph/s3cache`](https://sourcegraph.com/github.com/sourcegraph/s3cache) uses Amazon S3 for storage. +- [`github.com/gregjones/httpcache/leveldbcache`](https://github.com/gregjones/httpcache/tree/master/leveldbcache) provides a filesystem-backed cache using [leveldb](https://github.com/syndtr/goleveldb/leveldb). +- [`github.com/die-net/lrucache`](https://github.com/die-net/lrucache) provides an in-memory cache that will evict least-recently used entries. +- [`github.com/die-net/lrucache/twotier`](https://github.com/die-net/lrucache/tree/master/twotier) allows caches to be combined, for example to use lrucache above with a persistent disk-cache. +- [`github.com/birkelund/boltdbcache`](https://github.com/birkelund/boltdbcache) provides a BoltDB implementation (based on the [bbolt](https://github.com/coreos/bbolt) fork). 
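(Editor's note, not part of the vendored tree.) In use, the httpcache package described in this README is wired in by wrapping a Cache implementation in a Transport. A minimal sketch against the built-in memory cache; the URL is illustrative only, and whether the second request is actually served from cache depends on the server's Cache-Control/Expires headers:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"

	"github.com/gregjones/httpcache"
)

func main() {
	// NewTransport enables MarkCachedResponses, so cache hits carry the
	// X-From-Cache header.
	t := httpcache.NewTransport(httpcache.NewMemoryCache())
	client := t.Client()

	var _ *http.Client = client // the wrapped client is a plain *http.Client

	for i := 0; i < 2; i++ {
		resp, err := client.Get("https://example.com/")
		if err != nil {
			panic(err)
		}
		// Reading to EOF and closing the body is what triggers caching of GET responses.
		body, _ := ioutil.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Printf("request %d: %d bytes, X-From-Cache=%q\n",
			i+1, len(body), resp.Header.Get(httpcache.XFromCache))
	}
}
```

To persist entries across runs, the memory cache can be swapped for `diskcache.New("/some/dir")` from the diskcache subpackage shown later in this diff.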
+ +License +------- + +- [MIT License](LICENSE.txt) diff --git a/vendor/github.com/gregjones/httpcache/diskcache/diskcache.go b/vendor/github.com/gregjones/httpcache/diskcache/diskcache.go new file mode 100644 index 000000000..42e3129d8 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/diskcache/diskcache.go @@ -0,0 +1,61 @@ +// Package diskcache provides an implementation of httpcache.Cache that uses the diskv package +// to supplement an in-memory map with persistent storage +// +package diskcache + +import ( + "bytes" + "crypto/md5" + "encoding/hex" + "github.com/peterbourgon/diskv" + "io" +) + +// Cache is an implementation of httpcache.Cache that supplements the in-memory map with persistent storage +type Cache struct { + d *diskv.Diskv +} + +// Get returns the response corresponding to key if present +func (c *Cache) Get(key string) (resp []byte, ok bool) { + key = keyToFilename(key) + resp, err := c.d.Read(key) + if err != nil { + return []byte{}, false + } + return resp, true +} + +// Set saves a response to the cache as key +func (c *Cache) Set(key string, resp []byte) { + key = keyToFilename(key) + c.d.WriteStream(key, bytes.NewReader(resp), true) +} + +// Delete removes the response with key from the cache +func (c *Cache) Delete(key string) { + key = keyToFilename(key) + c.d.Erase(key) +} + +func keyToFilename(key string) string { + h := md5.New() + io.WriteString(h, key) + return hex.EncodeToString(h.Sum(nil)) +} + +// New returns a new Cache that will store files in basePath +func New(basePath string) *Cache { + return &Cache{ + d: diskv.New(diskv.Options{ + BasePath: basePath, + CacheSizeMax: 100 * 1024 * 1024, // 100MB + }), + } +} + +// NewWithDiskv returns a new Cache using the provided Diskv as underlying +// storage. +func NewWithDiskv(d *diskv.Diskv) *Cache { + return &Cache{d} +} diff --git a/vendor/github.com/gregjones/httpcache/diskcache/diskcache_test.go b/vendor/github.com/gregjones/httpcache/diskcache/diskcache_test.go new file mode 100644 index 000000000..35c76cbd1 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/diskcache/diskcache_test.go @@ -0,0 +1,42 @@ +package diskcache + +import ( + "bytes" + "io/ioutil" + "os" + "testing" +) + +func TestDiskCache(t *testing.T) { + tempDir, err := ioutil.TempDir("", "httpcache") + if err != nil { + t.Fatalf("TempDir: %v", err) + } + defer os.RemoveAll(tempDir) + + cache := New(tempDir) + + key := "testKey" + _, ok := cache.Get(key) + if ok { + t.Fatal("retrieved key before adding it") + } + + val := []byte("some bytes") + cache.Set(key, val) + + retVal, ok := cache.Get(key) + if !ok { + t.Fatal("could not retrieve an element we just added") + } + if !bytes.Equal(retVal, val) { + t.Fatal("retrieved a different value than what we put in") + } + + cache.Delete(key) + + _, ok = cache.Get(key) + if ok { + t.Fatal("deleted key still present") + } +} diff --git a/vendor/github.com/gregjones/httpcache/httpcache.go b/vendor/github.com/gregjones/httpcache/httpcache.go new file mode 100644 index 000000000..f6a2ec4a5 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/httpcache.go @@ -0,0 +1,551 @@ +// Package httpcache provides a http.RoundTripper implementation that works as a +// mostly RFC-compliant cache for http responses. +// +// It is only suitable for use as a 'private' cache (i.e. for a web-browser or an API-client +// and not for a shared proxy). 
+// +package httpcache + +import ( + "bufio" + "bytes" + "errors" + "io" + "io/ioutil" + "net/http" + "net/http/httputil" + "strings" + "sync" + "time" +) + +const ( + stale = iota + fresh + transparent + // XFromCache is the header added to responses that are returned from the cache + XFromCache = "X-From-Cache" +) + +// A Cache interface is used by the Transport to store and retrieve responses. +type Cache interface { + // Get returns the []byte representation of a cached response and a bool + // set to true if the value isn't empty + Get(key string) (responseBytes []byte, ok bool) + // Set stores the []byte representation of a response against a key + Set(key string, responseBytes []byte) + // Delete removes the value associated with the key + Delete(key string) +} + +// cacheKey returns the cache key for req. +func cacheKey(req *http.Request) string { + if req.Method == http.MethodGet { + return req.URL.String() + } else { + return req.Method + " " + req.URL.String() + } +} + +// CachedResponse returns the cached http.Response for req if present, and nil +// otherwise. +func CachedResponse(c Cache, req *http.Request) (resp *http.Response, err error) { + cachedVal, ok := c.Get(cacheKey(req)) + if !ok { + return + } + + b := bytes.NewBuffer(cachedVal) + return http.ReadResponse(bufio.NewReader(b), req) +} + +// MemoryCache is an implemtation of Cache that stores responses in an in-memory map. +type MemoryCache struct { + mu sync.RWMutex + items map[string][]byte +} + +// Get returns the []byte representation of the response and true if present, false if not +func (c *MemoryCache) Get(key string) (resp []byte, ok bool) { + c.mu.RLock() + resp, ok = c.items[key] + c.mu.RUnlock() + return resp, ok +} + +// Set saves response resp to the cache with key +func (c *MemoryCache) Set(key string, resp []byte) { + c.mu.Lock() + c.items[key] = resp + c.mu.Unlock() +} + +// Delete removes key from the cache +func (c *MemoryCache) Delete(key string) { + c.mu.Lock() + delete(c.items, key) + c.mu.Unlock() +} + +// NewMemoryCache returns a new Cache that will store items in an in-memory map +func NewMemoryCache() *MemoryCache { + c := &MemoryCache{items: map[string][]byte{}} + return c +} + +// Transport is an implementation of http.RoundTripper that will return values from a cache +// where possible (avoiding a network request) and will additionally add validators (etag/if-modified-since) +// to repeated requests allowing servers to return 304 / Not Modified +type Transport struct { + // The RoundTripper interface actually used to make requests + // If nil, http.DefaultTransport is used + Transport http.RoundTripper + Cache Cache + // If true, responses returned from the cache will be given an extra header, X-From-Cache + MarkCachedResponses bool +} + +// NewTransport returns a new Transport with the +// provided Cache implementation and MarkCachedResponses set to true +func NewTransport(c Cache) *Transport { + return &Transport{Cache: c, MarkCachedResponses: true} +} + +// Client returns an *http.Client that caches responses. 
+func (t *Transport) Client() *http.Client { + return &http.Client{Transport: t} +} + +// varyMatches will return false unless all of the cached values for the headers listed in Vary +// match the new request +func varyMatches(cachedResp *http.Response, req *http.Request) bool { + for _, header := range headerAllCommaSepValues(cachedResp.Header, "vary") { + header = http.CanonicalHeaderKey(header) + if header != "" && req.Header.Get(header) != cachedResp.Header.Get("X-Varied-"+header) { + return false + } + } + return true +} + +// RoundTrip takes a Request and returns a Response +// +// If there is a fresh Response already in cache, then it will be returned without connecting to +// the server. +// +// If there is a stale Response, then any validators it contains will be set on the new request +// to give the server a chance to respond with NotModified. If this happens, then the cached Response +// will be returned. +func (t *Transport) RoundTrip(req *http.Request) (resp *http.Response, err error) { + cacheKey := cacheKey(req) + cacheable := (req.Method == "GET" || req.Method == "HEAD") && req.Header.Get("range") == "" + var cachedResp *http.Response + if cacheable { + cachedResp, err = CachedResponse(t.Cache, req) + } else { + // Need to invalidate an existing value + t.Cache.Delete(cacheKey) + } + + transport := t.Transport + if transport == nil { + transport = http.DefaultTransport + } + + if cacheable && cachedResp != nil && err == nil { + if t.MarkCachedResponses { + cachedResp.Header.Set(XFromCache, "1") + } + + if varyMatches(cachedResp, req) { + // Can only use cached value if the new request doesn't Vary significantly + freshness := getFreshness(cachedResp.Header, req.Header) + if freshness == fresh { + return cachedResp, nil + } + + if freshness == stale { + var req2 *http.Request + // Add validators if caller hasn't already done so + etag := cachedResp.Header.Get("etag") + if etag != "" && req.Header.Get("etag") == "" { + req2 = cloneRequest(req) + req2.Header.Set("if-none-match", etag) + } + lastModified := cachedResp.Header.Get("last-modified") + if lastModified != "" && req.Header.Get("last-modified") == "" { + if req2 == nil { + req2 = cloneRequest(req) + } + req2.Header.Set("if-modified-since", lastModified) + } + if req2 != nil { + req = req2 + } + } + } + + resp, err = transport.RoundTrip(req) + if err == nil && req.Method == "GET" && resp.StatusCode == http.StatusNotModified { + // Replace the 304 response with the one from cache, but update with some new headers + endToEndHeaders := getEndToEndHeaders(resp.Header) + for _, header := range endToEndHeaders { + cachedResp.Header[header] = resp.Header[header] + } + resp = cachedResp + } else if (err != nil || (cachedResp != nil && resp.StatusCode >= 500)) && + req.Method == "GET" && canStaleOnError(cachedResp.Header, req.Header) { + // In case of transport failure and stale-if-error activated, returns cached content + // when available + return cachedResp, nil + } else { + if err != nil || resp.StatusCode != http.StatusOK { + t.Cache.Delete(cacheKey) + } + if err != nil { + return nil, err + } + } + } else { + reqCacheControl := parseCacheControl(req.Header) + if _, ok := reqCacheControl["only-if-cached"]; ok { + resp = newGatewayTimeoutResponse(req) + } else { + resp, err = transport.RoundTrip(req) + if err != nil { + return nil, err + } + } + } + + if cacheable && canStore(parseCacheControl(req.Header), parseCacheControl(resp.Header)) { + for _, varyKey := range headerAllCommaSepValues(resp.Header, "vary") { + varyKey = 
http.CanonicalHeaderKey(varyKey) + fakeHeader := "X-Varied-" + varyKey + reqValue := req.Header.Get(varyKey) + if reqValue != "" { + resp.Header.Set(fakeHeader, reqValue) + } + } + switch req.Method { + case "GET": + // Delay caching until EOF is reached. + resp.Body = &cachingReadCloser{ + R: resp.Body, + OnEOF: func(r io.Reader) { + resp := *resp + resp.Body = ioutil.NopCloser(r) + respBytes, err := httputil.DumpResponse(&resp, true) + if err == nil { + t.Cache.Set(cacheKey, respBytes) + } + }, + } + default: + respBytes, err := httputil.DumpResponse(resp, true) + if err == nil { + t.Cache.Set(cacheKey, respBytes) + } + } + } else { + t.Cache.Delete(cacheKey) + } + return resp, nil +} + +// ErrNoDateHeader indicates that the HTTP headers contained no Date header. +var ErrNoDateHeader = errors.New("no Date header") + +// Date parses and returns the value of the Date header. +func Date(respHeaders http.Header) (date time.Time, err error) { + dateHeader := respHeaders.Get("date") + if dateHeader == "" { + err = ErrNoDateHeader + return + } + + return time.Parse(time.RFC1123, dateHeader) +} + +type realClock struct{} + +func (c *realClock) since(d time.Time) time.Duration { + return time.Since(d) +} + +type timer interface { + since(d time.Time) time.Duration +} + +var clock timer = &realClock{} + +// getFreshness will return one of fresh/stale/transparent based on the cache-control +// values of the request and the response +// +// fresh indicates the response can be returned +// stale indicates that the response needs validating before it is returned +// transparent indicates the response should not be used to fulfil the request +// +// Because this is only a private cache, 'public' and 'private' in cache-control aren't +// signficant. Similarly, smax-age isn't used. +func getFreshness(respHeaders, reqHeaders http.Header) (freshness int) { + respCacheControl := parseCacheControl(respHeaders) + reqCacheControl := parseCacheControl(reqHeaders) + if _, ok := reqCacheControl["no-cache"]; ok { + return transparent + } + if _, ok := respCacheControl["no-cache"]; ok { + return stale + } + if _, ok := reqCacheControl["only-if-cached"]; ok { + return fresh + } + + date, err := Date(respHeaders) + if err != nil { + return stale + } + currentAge := clock.since(date) + + var lifetime time.Duration + var zeroDuration time.Duration + + // If a response includes both an Expires header and a max-age directive, + // the max-age directive overrides the Expires header, even if the Expires header is more restrictive. + if maxAge, ok := respCacheControl["max-age"]; ok { + lifetime, err = time.ParseDuration(maxAge + "s") + if err != nil { + lifetime = zeroDuration + } + } else { + expiresHeader := respHeaders.Get("Expires") + if expiresHeader != "" { + expires, err := time.Parse(time.RFC1123, expiresHeader) + if err != nil { + lifetime = zeroDuration + } else { + lifetime = expires.Sub(date) + } + } + } + + if maxAge, ok := reqCacheControl["max-age"]; ok { + // the client is willing to accept a response whose age is no greater than the specified time in seconds + lifetime, err = time.ParseDuration(maxAge + "s") + if err != nil { + lifetime = zeroDuration + } + } + if minfresh, ok := reqCacheControl["min-fresh"]; ok { + // the client wants a response that will still be fresh for at least the specified number of seconds. 
+ minfreshDuration, err := time.ParseDuration(minfresh + "s") + if err == nil { + currentAge = time.Duration(currentAge + minfreshDuration) + } + } + + if maxstale, ok := reqCacheControl["max-stale"]; ok { + // Indicates that the client is willing to accept a response that has exceeded its expiration time. + // If max-stale is assigned a value, then the client is willing to accept a response that has exceeded + // its expiration time by no more than the specified number of seconds. + // If no value is assigned to max-stale, then the client is willing to accept a stale response of any age. + // + // Responses served only because of a max-stale value are supposed to have a Warning header added to them, + // but that seems like a hassle, and is it actually useful? If so, then there needs to be a different + // return-value available here. + if maxstale == "" { + return fresh + } + maxstaleDuration, err := time.ParseDuration(maxstale + "s") + if err == nil { + currentAge = time.Duration(currentAge - maxstaleDuration) + } + } + + if lifetime > currentAge { + return fresh + } + + return stale +} + +// Returns true if either the request or the response includes the stale-if-error +// cache control extension: https://tools.ietf.org/html/rfc5861 +func canStaleOnError(respHeaders, reqHeaders http.Header) bool { + respCacheControl := parseCacheControl(respHeaders) + reqCacheControl := parseCacheControl(reqHeaders) + + var err error + lifetime := time.Duration(-1) + + if staleMaxAge, ok := respCacheControl["stale-if-error"]; ok { + if staleMaxAge != "" { + lifetime, err = time.ParseDuration(staleMaxAge + "s") + if err != nil { + return false + } + } else { + return true + } + } + if staleMaxAge, ok := reqCacheControl["stale-if-error"]; ok { + if staleMaxAge != "" { + lifetime, err = time.ParseDuration(staleMaxAge + "s") + if err != nil { + return false + } + } else { + return true + } + } + + if lifetime >= 0 { + date, err := Date(respHeaders) + if err != nil { + return false + } + currentAge := clock.since(date) + if lifetime > currentAge { + return true + } + } + + return false +} + +func getEndToEndHeaders(respHeaders http.Header) []string { + // These headers are always hop-by-hop + hopByHopHeaders := map[string]struct{}{ + "Connection": struct{}{}, + "Keep-Alive": struct{}{}, + "Proxy-Authenticate": struct{}{}, + "Proxy-Authorization": struct{}{}, + "Te": struct{}{}, + "Trailers": struct{}{}, + "Transfer-Encoding": struct{}{}, + "Upgrade": struct{}{}, + } + + for _, extra := range strings.Split(respHeaders.Get("connection"), ",") { + // any header listed in connection, if present, is also considered hop-by-hop + if strings.Trim(extra, " ") != "" { + hopByHopHeaders[http.CanonicalHeaderKey(extra)] = struct{}{} + } + } + endToEndHeaders := []string{} + for respHeader, _ := range respHeaders { + if _, ok := hopByHopHeaders[respHeader]; !ok { + endToEndHeaders = append(endToEndHeaders, respHeader) + } + } + return endToEndHeaders +} + +func canStore(reqCacheControl, respCacheControl cacheControl) (canStore bool) { + if _, ok := respCacheControl["no-store"]; ok { + return false + } + if _, ok := reqCacheControl["no-store"]; ok { + return false + } + return true +} + +func newGatewayTimeoutResponse(req *http.Request) *http.Response { + var braw bytes.Buffer + braw.WriteString("HTTP/1.1 504 Gateway Timeout\r\n\r\n") + resp, err := http.ReadResponse(bufio.NewReader(&braw), req) + if err != nil { + panic(err) + } + return resp +} + +// cloneRequest returns a clone of the provided *http.Request. 
+// The clone is a shallow copy of the struct and its Header map. +// (This function copyright goauth2 authors: https://code.google.com/p/goauth2) +func cloneRequest(r *http.Request) *http.Request { + // shallow copy of the struct + r2 := new(http.Request) + *r2 = *r + // deep copy of the Header + r2.Header = make(http.Header) + for k, s := range r.Header { + r2.Header[k] = s + } + return r2 +} + +type cacheControl map[string]string + +func parseCacheControl(headers http.Header) cacheControl { + cc := cacheControl{} + ccHeader := headers.Get("Cache-Control") + for _, part := range strings.Split(ccHeader, ",") { + part = strings.Trim(part, " ") + if part == "" { + continue + } + if strings.ContainsRune(part, '=') { + keyval := strings.Split(part, "=") + cc[strings.Trim(keyval[0], " ")] = strings.Trim(keyval[1], ",") + } else { + cc[part] = "" + } + } + return cc +} + +// headerAllCommaSepValues returns all comma-separated values (each +// with whitespace trimmed) for header name in headers. According to +// Section 4.2 of the HTTP/1.1 spec +// (http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2), +// values from multiple occurrences of a header should be concatenated, if +// the header's value is a comma-separated list. +func headerAllCommaSepValues(headers http.Header, name string) []string { + var vals []string + for _, val := range headers[http.CanonicalHeaderKey(name)] { + fields := strings.Split(val, ",") + for i, f := range fields { + fields[i] = strings.TrimSpace(f) + } + vals = append(vals, fields...) + } + return vals +} + +// cachingReadCloser is a wrapper around ReadCloser R that calls OnEOF +// handler with a full copy of the content read from R when EOF is +// reached. +type cachingReadCloser struct { + // Underlying ReadCloser. + R io.ReadCloser + // OnEOF is called with a copy of the content of R when EOF is reached. + OnEOF func(io.Reader) + + buf bytes.Buffer // buf stores a copy of the content of R. +} + +// Read reads the next len(p) bytes from R or until R is drained. The +// return value n is the number of bytes read. If R has no data to +// return, err is io.EOF and OnEOF is called with a full copy of what +// has been read so far. +func (r *cachingReadCloser) Read(p []byte) (n int, err error) { + n, err = r.R.Read(p) + r.buf.Write(p[:n]) + if err == io.EOF { + r.OnEOF(bytes.NewReader(r.buf.Bytes())) + } + return n, err +} + +func (r *cachingReadCloser) Close() error { + return r.R.Close() +} + +// NewMemoryCacheTransport returns a new Transport using the in-memory cache implementation +func NewMemoryCacheTransport() *Transport { + c := NewMemoryCache() + t := NewTransport(c) + return t +} diff --git a/vendor/github.com/gregjones/httpcache/httpcache_test.go b/vendor/github.com/gregjones/httpcache/httpcache_test.go new file mode 100644 index 000000000..a50464180 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/httpcache_test.go @@ -0,0 +1,1475 @@ +package httpcache + +import ( + "bytes" + "errors" + "flag" + "io" + "io/ioutil" + "net/http" + "net/http/httptest" + "os" + "strconv" + "testing" + "time" +) + +var s struct { + server *httptest.Server + client http.Client + transport *Transport + done chan struct{} // Closed to unlock infinite handlers. 
+} + +type fakeClock struct { + elapsed time.Duration +} + +func (c *fakeClock) since(t time.Time) time.Duration { + return c.elapsed +} + +func TestMain(m *testing.M) { + flag.Parse() + setup() + code := m.Run() + teardown() + os.Exit(code) +} + +func setup() { + tp := NewMemoryCacheTransport() + client := http.Client{Transport: tp} + s.transport = tp + s.client = client + s.done = make(chan struct{}) + + mux := http.NewServeMux() + s.server = httptest.NewServer(mux) + + mux.HandleFunc("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "max-age=3600") + })) + + mux.HandleFunc("/method", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "max-age=3600") + w.Write([]byte(r.Method)) + })) + + mux.HandleFunc("/range", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lm := "Fri, 14 Dec 2010 01:01:50 GMT" + if r.Header.Get("if-modified-since") == lm { + w.WriteHeader(http.StatusNotModified) + return + } + w.Header().Set("last-modified", lm) + if r.Header.Get("range") == "bytes=4-9" { + w.WriteHeader(http.StatusPartialContent) + w.Write([]byte(" text ")) + return + } + w.Write([]byte("Some text content")) + })) + + mux.HandleFunc("/nostore", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "no-store") + })) + + mux.HandleFunc("/etag", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + etag := "124567" + if r.Header.Get("if-none-match") == etag { + w.WriteHeader(http.StatusNotModified) + return + } + w.Header().Set("etag", etag) + })) + + mux.HandleFunc("/lastmodified", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + lm := "Fri, 14 Dec 2010 01:01:50 GMT" + if r.Header.Get("if-modified-since") == lm { + w.WriteHeader(http.StatusNotModified) + return + } + w.Header().Set("last-modified", lm) + })) + + mux.HandleFunc("/varyaccept", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "max-age=3600") + w.Header().Set("Content-Type", "text/plain") + w.Header().Set("Vary", "Accept") + w.Write([]byte("Some text content")) + })) + + mux.HandleFunc("/doublevary", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "max-age=3600") + w.Header().Set("Content-Type", "text/plain") + w.Header().Set("Vary", "Accept, Accept-Language") + w.Write([]byte("Some text content")) + })) + mux.HandleFunc("/2varyheaders", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "max-age=3600") + w.Header().Set("Content-Type", "text/plain") + w.Header().Add("Vary", "Accept") + w.Header().Add("Vary", "Accept-Language") + w.Write([]byte("Some text content")) + })) + mux.HandleFunc("/varyunused", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Cache-Control", "max-age=3600") + w.Header().Set("Content-Type", "text/plain") + w.Header().Set("Vary", "X-Madeup-Header") + w.Write([]byte("Some text content")) + })) + + mux.HandleFunc("/cachederror", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + etag := "abc" + if r.Header.Get("if-none-match") == etag { + w.WriteHeader(http.StatusNotModified) + return + } + w.Header().Set("etag", etag) + w.WriteHeader(http.StatusNotFound) + w.Write([]byte("Not found")) + })) + + updateFieldsCounter := 0 + mux.HandleFunc("/updatefields", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + 
w.Header().Set("X-Counter", strconv.Itoa(updateFieldsCounter)) + w.Header().Set("Etag", `"e"`) + updateFieldsCounter++ + if r.Header.Get("if-none-match") != "" { + w.WriteHeader(http.StatusNotModified) + return + } + w.Write([]byte("Some text content")) + })) + + // Take 3 seconds to return 200 OK (for testing client timeouts). + mux.HandleFunc("/3seconds", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + time.Sleep(3 * time.Second) + })) + + mux.HandleFunc("/infinite", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + for { + select { + case <-s.done: + return + default: + w.Write([]byte{0}) + } + } + })) +} + +func teardown() { + close(s.done) + s.server.Close() +} + +func resetTest() { + s.transport.Cache = NewMemoryCache() + clock = &realClock{} +} + +// TestCacheableMethod ensures that uncacheable method does not get stored +// in cache and get incorrectly used for a following cacheable method request. +func TestCacheableMethod(t *testing.T) { + resetTest() + { + req, err := http.NewRequest("POST", s.server.URL+"/method", nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + _, err = io.Copy(&buf, resp.Body) + if err != nil { + t.Fatal(err) + } + err = resp.Body.Close() + if err != nil { + t.Fatal(err) + } + if got, want := buf.String(), "POST"; got != want { + t.Errorf("got %q, want %q", got, want) + } + if resp.StatusCode != http.StatusOK { + t.Errorf("response status code isn't 200 OK: %v", resp.StatusCode) + } + } + { + req, err := http.NewRequest("GET", s.server.URL+"/method", nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + _, err = io.Copy(&buf, resp.Body) + if err != nil { + t.Fatal(err) + } + err = resp.Body.Close() + if err != nil { + t.Fatal(err) + } + if got, want := buf.String(), "GET"; got != want { + t.Errorf("got wrong body %q, want %q", got, want) + } + if resp.StatusCode != http.StatusOK { + t.Errorf("response status code isn't 200 OK: %v", resp.StatusCode) + } + if resp.Header.Get(XFromCache) != "" { + t.Errorf("XFromCache header isn't blank") + } + } +} + +func TestDontServeHeadResponseToGetRequest(t *testing.T) { + resetTest() + url := s.server.URL + "/" + req, err := http.NewRequest(http.MethodHead, url, nil) + if err != nil { + t.Fatal(err) + } + _, err = s.client.Do(req) + if err != nil { + t.Fatal(err) + } + req, err = http.NewRequest(http.MethodGet, url, nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + if resp.Header.Get(XFromCache) != "" { + t.Errorf("Cache should not match") + } +} + +func TestDontStorePartialRangeInCache(t *testing.T) { + resetTest() + { + req, err := http.NewRequest("GET", s.server.URL+"/range", nil) + if err != nil { + t.Fatal(err) + } + req.Header.Set("range", "bytes=4-9") + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + _, err = io.Copy(&buf, resp.Body) + if err != nil { + t.Fatal(err) + } + err = resp.Body.Close() + if err != nil { + t.Fatal(err) + } + if got, want := buf.String(), " text "; got != want { + t.Errorf("got %q, want %q", got, want) + } + if resp.StatusCode != http.StatusPartialContent { + t.Errorf("response status code isn't 206 Partial Content: %v", resp.StatusCode) + } + } + { + req, err := http.NewRequest("GET", s.server.URL+"/range", nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if 
err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + _, err = io.Copy(&buf, resp.Body) + if err != nil { + t.Fatal(err) + } + err = resp.Body.Close() + if err != nil { + t.Fatal(err) + } + if got, want := buf.String(), "Some text content"; got != want { + t.Errorf("got %q, want %q", got, want) + } + if resp.StatusCode != http.StatusOK { + t.Errorf("response status code isn't 200 OK: %v", resp.StatusCode) + } + if resp.Header.Get(XFromCache) != "" { + t.Error("XFromCache header isn't blank") + } + } + { + req, err := http.NewRequest("GET", s.server.URL+"/range", nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + _, err = io.Copy(&buf, resp.Body) + if err != nil { + t.Fatal(err) + } + err = resp.Body.Close() + if err != nil { + t.Fatal(err) + } + if got, want := buf.String(), "Some text content"; got != want { + t.Errorf("got %q, want %q", got, want) + } + if resp.StatusCode != http.StatusOK { + t.Errorf("response status code isn't 200 OK: %v", resp.StatusCode) + } + if resp.Header.Get(XFromCache) != "1" { + t.Errorf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } + { + req, err := http.NewRequest("GET", s.server.URL+"/range", nil) + if err != nil { + t.Fatal(err) + } + req.Header.Set("range", "bytes=4-9") + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + _, err = io.Copy(&buf, resp.Body) + if err != nil { + t.Fatal(err) + } + err = resp.Body.Close() + if err != nil { + t.Fatal(err) + } + if got, want := buf.String(), " text "; got != want { + t.Errorf("got %q, want %q", got, want) + } + if resp.StatusCode != http.StatusPartialContent { + t.Errorf("response status code isn't 206 Partial Content: %v", resp.StatusCode) + } + } +} + +func TestCacheOnlyIfBodyRead(t *testing.T) { + resetTest() + { + req, err := http.NewRequest("GET", s.server.URL, nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + // We do not read the body + resp.Body.Close() + } + { + req, err := http.NewRequest("GET", s.server.URL, nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatalf("XFromCache header isn't blank") + } + } +} + +func TestOnlyReadBodyOnDemand(t *testing.T) { + resetTest() + + req, err := http.NewRequest("GET", s.server.URL+"/infinite", nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) // This shouldn't hang forever. + if err != nil { + t.Fatal(err) + } + buf := make([]byte, 10) // Only partially read the body. 
+ _, err = resp.Body.Read(buf) + if err != nil { + t.Fatal(err) + } + resp.Body.Close() +} + +func TestGetOnlyIfCachedHit(t *testing.T) { + resetTest() + { + req, err := http.NewRequest("GET", s.server.URL, nil) + if err != nil { + t.Fatal(err) + } + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + req, err := http.NewRequest("GET", s.server.URL, nil) + if err != nil { + t.Fatal(err) + } + req.Header.Add("cache-control", "only-if-cached") + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + if resp.StatusCode != http.StatusOK { + t.Fatalf("response status code isn't 200 OK: %v", resp.StatusCode) + } + } +} + +func TestGetOnlyIfCachedMiss(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL, nil) + if err != nil { + t.Fatal(err) + } + req.Header.Add("cache-control", "only-if-cached") + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + if resp.StatusCode != http.StatusGatewayTimeout { + t.Fatalf("response status code isn't 504 GatewayTimeout: %v", resp.StatusCode) + } +} + +func TestGetNoStoreRequest(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL, nil) + if err != nil { + t.Fatal(err) + } + req.Header.Add("Cache-Control", "no-store") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } +} + +func TestGetNoStoreResponse(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/nostore", nil) + if err != nil { + t.Fatal(err) + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } +} + +func TestGetWithEtag(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/etag", nil) + if err != nil { + t.Fatal(err) + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + // additional assertions to verify that 304 response is converted properly + if resp.StatusCode != http.StatusOK { + t.Fatalf("response status code isn't 200 OK: %v", resp.StatusCode) + } + if _, ok := resp.Header["Connection"]; ok { + 
t.Fatalf("Connection header isn't absent") + } + } +} + +func TestGetWithLastModified(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/lastmodified", nil) + if err != nil { + t.Fatal(err) + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } +} + +func TestGetWithVary(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/varyaccept", nil) + if err != nil { + t.Fatal(err) + } + req.Header.Set("Accept", "text/plain") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get("Vary") != "Accept" { + t.Fatalf(`Vary header isn't "Accept": %v`, resp.Header.Get("Vary")) + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } + req.Header.Set("Accept", "text/html") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + req.Header.Set("Accept", "") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } +} + +func TestGetWithDoubleVary(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/doublevary", nil) + if err != nil { + t.Fatal(err) + } + req.Header.Set("Accept", "text/plain") + req.Header.Set("Accept-Language", "da, en-gb;q=0.8, en;q=0.7") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get("Vary") == "" { + t.Fatalf(`Vary header is blank`) + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } + req.Header.Set("Accept-Language", "") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + req.Header.Set("Accept-Language", "da") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } +} + +func TestGetWith2VaryHeaders(t *testing.T) { + resetTest() + // Tests that multiple Vary headers' comma-separated lists are + // merged. See https://github.com/gregjones/httpcache/issues/27. 
+ const ( + accept = "text/plain" + acceptLanguage = "da, en-gb;q=0.8, en;q=0.7" + ) + req, err := http.NewRequest("GET", s.server.URL+"/2varyheaders", nil) + if err != nil { + t.Fatal(err) + } + req.Header.Set("Accept", accept) + req.Header.Set("Accept-Language", acceptLanguage) + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get("Vary") == "" { + t.Fatalf(`Vary header is blank`) + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } + req.Header.Set("Accept-Language", "") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + req.Header.Set("Accept-Language", "da") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + req.Header.Set("Accept-Language", acceptLanguage) + req.Header.Set("Accept", "") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + } + req.Header.Set("Accept", "image/png") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "" { + t.Fatal("XFromCache header isn't blank") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } +} + +func TestGetVaryUnused(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/varyunused", nil) + if err != nil { + t.Fatal(err) + } + req.Header.Set("Accept", "text/plain") + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get("Vary") == "" { + t.Fatalf(`Vary header is blank`) + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + } +} + +func TestUpdateFields(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/updatefields", nil) + if err != nil { + t.Fatal(err) + } + var counter, counter2 string + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + counter = resp.Header.Get("x-counter") + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.Header.Get(XFromCache) != "1" { + t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) + } + counter2 = resp.Header.Get("x-counter") + } + if counter == counter2 { + t.Fatalf(`both "x-counter" values are equal: %v %v`, counter, counter2) + } +} + +// This tests the fix for 
https://github.com/gregjones/httpcache/issues/74. +// Previously, after validating a cached response, its StatusCode +// was incorrectly being replaced. +func TestCachedErrorsKeepStatus(t *testing.T) { + resetTest() + req, err := http.NewRequest("GET", s.server.URL+"/cachederror", nil) + if err != nil { + t.Fatal(err) + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + io.Copy(ioutil.Discard, resp.Body) + } + { + resp, err := s.client.Do(req) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusNotFound { + t.Fatalf("Status code isn't 404: %d", resp.StatusCode) + } + } +} + +func TestParseCacheControl(t *testing.T) { + resetTest() + h := http.Header{} + for range parseCacheControl(h) { + t.Fatal("cacheControl should be empty") + } + + h.Set("cache-control", "no-cache") + { + cc := parseCacheControl(h) + if _, ok := cc["foo"]; ok { + t.Error(`Value "foo" shouldn't exist`) + } + noCache, ok := cc["no-cache"] + if !ok { + t.Fatalf(`"no-cache" value isn't set`) + } + if noCache != "" { + t.Fatalf(`"no-cache" value isn't blank: %v`, noCache) + } + } + h.Set("cache-control", "no-cache, max-age=3600") + { + cc := parseCacheControl(h) + noCache, ok := cc["no-cache"] + if !ok { + t.Fatalf(`"no-cache" value isn't set`) + } + if noCache != "" { + t.Fatalf(`"no-cache" value isn't blank: %v`, noCache) + } + if cc["max-age"] != "3600" { + t.Fatalf(`"max-age" value isn't "3600": %v`, cc["max-age"]) + } + } +} + +func TestNoCacheRequestExpiration(t *testing.T) { + resetTest() + respHeaders := http.Header{} + respHeaders.Set("Cache-Control", "max-age=7200") + + reqHeaders := http.Header{} + reqHeaders.Set("Cache-Control", "no-cache") + if getFreshness(respHeaders, reqHeaders) != transparent { + t.Fatal("freshness isn't transparent") + } +} + +func TestNoCacheResponseExpiration(t *testing.T) { + resetTest() + respHeaders := http.Header{} + respHeaders.Set("Cache-Control", "no-cache") + respHeaders.Set("Expires", "Wed, 19 Apr 3000 11:43:00 GMT") + + reqHeaders := http.Header{} + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestReqMustRevalidate(t *testing.T) { + resetTest() + // not paying attention to request setting max-stale means never returning stale + // responses, so always acting as if must-revalidate is set + respHeaders := http.Header{} + + reqHeaders := http.Header{} + reqHeaders.Set("Cache-Control", "must-revalidate") + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestRespMustRevalidate(t *testing.T) { + resetTest() + respHeaders := http.Header{} + respHeaders.Set("Cache-Control", "must-revalidate") + + reqHeaders := http.Header{} + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestFreshExpiration(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + respHeaders.Set("expires", now.Add(time.Duration(2)*time.Second).Format(time.RFC1123)) + + reqHeaders := http.Header{} + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } + + clock = &fakeClock{elapsed: 3 * time.Second} + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestMaxAge(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + 
respHeaders.Set("cache-control", "max-age=2") + + reqHeaders := http.Header{} + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } + + clock = &fakeClock{elapsed: 3 * time.Second} + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestMaxAgeZero(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + respHeaders.Set("cache-control", "max-age=0") + + reqHeaders := http.Header{} + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestBothMaxAge(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + respHeaders.Set("cache-control", "max-age=2") + + reqHeaders := http.Header{} + reqHeaders.Set("cache-control", "max-age=0") + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestMinFreshWithExpires(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + respHeaders.Set("expires", now.Add(time.Duration(2)*time.Second).Format(time.RFC1123)) + + reqHeaders := http.Header{} + reqHeaders.Set("cache-control", "min-fresh=1") + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } + + reqHeaders = http.Header{} + reqHeaders.Set("cache-control", "min-fresh=2") + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func TestEmptyMaxStale(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + respHeaders.Set("cache-control", "max-age=20") + + reqHeaders := http.Header{} + reqHeaders.Set("cache-control", "max-stale") + clock = &fakeClock{elapsed: 10 * time.Second} + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } + + clock = &fakeClock{elapsed: 60 * time.Second} + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } +} + +func TestMaxStaleValue(t *testing.T) { + resetTest() + now := time.Now() + respHeaders := http.Header{} + respHeaders.Set("date", now.Format(time.RFC1123)) + respHeaders.Set("cache-control", "max-age=10") + + reqHeaders := http.Header{} + reqHeaders.Set("cache-control", "max-stale=20") + clock = &fakeClock{elapsed: 5 * time.Second} + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } + + clock = &fakeClock{elapsed: 15 * time.Second} + if getFreshness(respHeaders, reqHeaders) != fresh { + t.Fatal("freshness isn't fresh") + } + + clock = &fakeClock{elapsed: 30 * time.Second} + if getFreshness(respHeaders, reqHeaders) != stale { + t.Fatal("freshness isn't stale") + } +} + +func containsHeader(headers []string, header string) bool { + for _, v := range headers { + if http.CanonicalHeaderKey(v) == http.CanonicalHeaderKey(header) { + return true + } + } + return false +} + +func TestGetEndToEndHeaders(t *testing.T) { + resetTest() + var ( + headers http.Header + end2end []string + ) + + headers = http.Header{} + headers.Set("content-type", "text/html") + headers.Set("te", "deflate") + + end2end = getEndToEndHeaders(headers) + if !containsHeader(end2end, "content-type") { + t.Fatal(`doesn't contain "content-type" header`) + } + if containsHeader(end2end, "te") { + t.Fatal(`doesn't contain 
"te" header`) + } + + headers = http.Header{} + headers.Set("connection", "content-type") + headers.Set("content-type", "text/csv") + headers.Set("te", "deflate") + end2end = getEndToEndHeaders(headers) + if containsHeader(end2end, "connection") { + t.Fatal(`doesn't contain "connection" header`) + } + if containsHeader(end2end, "content-type") { + t.Fatal(`doesn't contain "content-type" header`) + } + if containsHeader(end2end, "te") { + t.Fatal(`doesn't contain "te" header`) + } + + headers = http.Header{} + end2end = getEndToEndHeaders(headers) + if len(end2end) != 0 { + t.Fatal(`non-zero end2end headers`) + } + + headers = http.Header{} + headers.Set("connection", "content-type") + end2end = getEndToEndHeaders(headers) + if len(end2end) != 0 { + t.Fatal(`non-zero end2end headers`) + } +} + +type transportMock struct { + response *http.Response + err error +} + +func (t transportMock) RoundTrip(req *http.Request) (resp *http.Response, err error) { + return t.response, t.err +} + +func TestStaleIfErrorRequest(t *testing.T) { + resetTest() + now := time.Now() + tmock := transportMock{ + response: &http.Response{ + Status: http.StatusText(http.StatusOK), + StatusCode: http.StatusOK, + Header: http.Header{ + "Date": []string{now.Format(time.RFC1123)}, + "Cache-Control": []string{"no-cache"}, + }, + Body: ioutil.NopCloser(bytes.NewBuffer([]byte("some data"))), + }, + err: nil, + } + tp := NewMemoryCacheTransport() + tp.Transport = &tmock + + // First time, response is cached on success + r, _ := http.NewRequest("GET", "http://somewhere.com/", nil) + r.Header.Set("Cache-Control", "stale-if-error") + resp, err := tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + + // On failure, response is returned from the cache + tmock.response = nil + tmock.err = errors.New("some error") + resp, err = tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } +} + +func TestStaleIfErrorRequestLifetime(t *testing.T) { + resetTest() + now := time.Now() + tmock := transportMock{ + response: &http.Response{ + Status: http.StatusText(http.StatusOK), + StatusCode: http.StatusOK, + Header: http.Header{ + "Date": []string{now.Format(time.RFC1123)}, + "Cache-Control": []string{"no-cache"}, + }, + Body: ioutil.NopCloser(bytes.NewBuffer([]byte("some data"))), + }, + err: nil, + } + tp := NewMemoryCacheTransport() + tp.Transport = &tmock + + // First time, response is cached on success + r, _ := http.NewRequest("GET", "http://somewhere.com/", nil) + r.Header.Set("Cache-Control", "stale-if-error=100") + resp, err := tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + + // On failure, response is returned from the cache + tmock.response = nil + tmock.err = errors.New("some error") + resp, err = tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + + // Same for http errors + tmock.response = &http.Response{StatusCode: http.StatusInternalServerError} + tmock.err = nil + resp, err = tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + + // If failure last more than max stale, error is returned + clock = &fakeClock{elapsed: 200 * time.Second} + _, err = tp.RoundTrip(r) + if err != tmock.err { + t.Fatalf("got err %v, want %v", err, 
tmock.err) + } +} + +func TestStaleIfErrorResponse(t *testing.T) { + resetTest() + now := time.Now() + tmock := transportMock{ + response: &http.Response{ + Status: http.StatusText(http.StatusOK), + StatusCode: http.StatusOK, + Header: http.Header{ + "Date": []string{now.Format(time.RFC1123)}, + "Cache-Control": []string{"no-cache, stale-if-error"}, + }, + Body: ioutil.NopCloser(bytes.NewBuffer([]byte("some data"))), + }, + err: nil, + } + tp := NewMemoryCacheTransport() + tp.Transport = &tmock + + // First time, response is cached on success + r, _ := http.NewRequest("GET", "http://somewhere.com/", nil) + resp, err := tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + + // On failure, response is returned from the cache + tmock.response = nil + tmock.err = errors.New("some error") + resp, err = tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } +} + +func TestStaleIfErrorResponseLifetime(t *testing.T) { + resetTest() + now := time.Now() + tmock := transportMock{ + response: &http.Response{ + Status: http.StatusText(http.StatusOK), + StatusCode: http.StatusOK, + Header: http.Header{ + "Date": []string{now.Format(time.RFC1123)}, + "Cache-Control": []string{"no-cache, stale-if-error=100"}, + }, + Body: ioutil.NopCloser(bytes.NewBuffer([]byte("some data"))), + }, + err: nil, + } + tp := NewMemoryCacheTransport() + tp.Transport = &tmock + + // First time, response is cached on success + r, _ := http.NewRequest("GET", "http://somewhere.com/", nil) + resp, err := tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + + // On failure, response is returned from the cache + tmock.response = nil + tmock.err = errors.New("some error") + resp, err = tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + + // If failure last more than max stale, error is returned + clock = &fakeClock{elapsed: 200 * time.Second} + _, err = tp.RoundTrip(r) + if err != tmock.err { + t.Fatalf("got err %v, want %v", err, tmock.err) + } +} + +// This tests the fix for https://github.com/gregjones/httpcache/issues/74. +// Previously, after a stale response was used after encountering an error, +// its StatusCode was being incorrectly replaced. 
+func TestStaleIfErrorKeepsStatus(t *testing.T) { + resetTest() + now := time.Now() + tmock := transportMock{ + response: &http.Response{ + Status: http.StatusText(http.StatusNotFound), + StatusCode: http.StatusNotFound, + Header: http.Header{ + "Date": []string{now.Format(time.RFC1123)}, + "Cache-Control": []string{"no-cache"}, + }, + Body: ioutil.NopCloser(bytes.NewBuffer([]byte("some data"))), + }, + err: nil, + } + tp := NewMemoryCacheTransport() + tp.Transport = &tmock + + // First time, response is cached on success + r, _ := http.NewRequest("GET", "http://somewhere.com/", nil) + r.Header.Set("Cache-Control", "stale-if-error") + resp, err := tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + _, err = ioutil.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + + // On failure, response is returned from the cache + tmock.response = nil + tmock.err = errors.New("some error") + resp, err = tp.RoundTrip(r) + if err != nil { + t.Fatal(err) + } + if resp == nil { + t.Fatal("resp is nil") + } + if resp.StatusCode != http.StatusNotFound { + t.Fatalf("Status wasn't 404: %d", resp.StatusCode) + } +} + +// Test that http.Client.Timeout is respected when cache transport is used. +// That is so as long as request cancellation is propagated correctly. +// In the past, that required CancelRequest to be implemented correctly, +// but modern http.Client uses Request.Cancel (or request context) instead, +// so we don't have to do anything. +func TestClientTimeout(t *testing.T) { + if testing.Short() { + t.Skip("skipping timeout test in short mode") // Because it takes at least 3 seconds to run. + } + resetTest() + client := &http.Client{ + Transport: NewMemoryCacheTransport(), + Timeout: time.Second, + } + started := time.Now() + resp, err := client.Get(s.server.URL + "/3seconds") + taken := time.Since(started) + if err == nil { + t.Error("got nil error, want timeout error") + } + if resp != nil { + t.Error("got non-nil resp, want nil resp") + } + if taken >= 2*time.Second { + t.Error("client.Do took 2+ seconds, want < 2 seconds") + } +} diff --git a/vendor/github.com/gregjones/httpcache/leveldbcache/leveldbcache.go b/vendor/github.com/gregjones/httpcache/leveldbcache/leveldbcache.go new file mode 100644 index 000000000..9bcb7e277 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/leveldbcache/leveldbcache.go @@ -0,0 +1,51 @@ +// Package leveldbcache provides an implementation of httpcache.Cache that +// uses github.com/syndtr/goleveldb/leveldb +package leveldbcache + +import ( + "github.com/syndtr/goleveldb/leveldb" +) + +// Cache is an implementation of httpcache.Cache with leveldb storage +type Cache struct { + db *leveldb.DB +} + +// Get returns the response corresponding to key if present +func (c *Cache) Get(key string) (resp []byte, ok bool) { + var err error + resp, err = c.db.Get([]byte(key), nil) + if err != nil { + return []byte{}, false + } + return resp, true +} + +// Set saves a response to the cache as key +func (c *Cache) Set(key string, resp []byte) { + c.db.Put([]byte(key), resp, nil) +} + +// Delete removes the response with key from the cache +func (c *Cache) Delete(key string) { + c.db.Delete([]byte(key), nil) +} + +// New returns a new Cache that will store leveldb in path +func New(path string) (*Cache, error) { + cache := &Cache{} + + var err error + cache.db, err = leveldb.OpenFile(path, nil) + + if err != nil { + return nil, err + } + return cache, nil +} + +// NewWithDB returns a new Cache using the provided 
leveldb as underlying +// storage. +func NewWithDB(db *leveldb.DB) *Cache { + return &Cache{db} +} diff --git a/vendor/github.com/gregjones/httpcache/leveldbcache/leveldbcache_test.go b/vendor/github.com/gregjones/httpcache/leveldbcache/leveldbcache_test.go new file mode 100644 index 000000000..b885c0169 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/leveldbcache/leveldbcache_test.go @@ -0,0 +1,46 @@ +package leveldbcache + +import ( + "bytes" + "io/ioutil" + "os" + "path/filepath" + "testing" +) + +func TestDiskCache(t *testing.T) { + tempDir, err := ioutil.TempDir("", "httpcache") + if err != nil { + t.Fatalf("TempDir: %v", err) + } + defer os.RemoveAll(tempDir) + + cache, err := New(filepath.Join(tempDir, "db")) + if err != nil { + t.Fatalf("New leveldb,: %v", err) + } + + key := "testKey" + _, ok := cache.Get(key) + if ok { + t.Fatal("retrieved key before adding it") + } + + val := []byte("some bytes") + cache.Set(key, val) + + retVal, ok := cache.Get(key) + if !ok { + t.Fatal("could not retrieve an element we just added") + } + if !bytes.Equal(retVal, val) { + t.Fatal("retrieved a different value than what we put in") + } + + cache.Delete(key) + + _, ok = cache.Get(key) + if ok { + t.Fatal("deleted key still present") + } +} diff --git a/vendor/github.com/gregjones/httpcache/memcache/appengine.go b/vendor/github.com/gregjones/httpcache/memcache/appengine.go new file mode 100644 index 000000000..e68d9bc09 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/memcache/appengine.go @@ -0,0 +1,61 @@ +// +build appengine + +// Package memcache provides an implementation of httpcache.Cache that uses App +// Engine's memcache package to store cached responses. +// +// When not built for Google App Engine, this package will provide an +// implementation that connects to a specified memcached server. See the +// memcache.go file in this package for details. +package memcache + +import ( + "appengine" + "appengine/memcache" +) + +// Cache is an implementation of httpcache.Cache that caches responses in App +// Engine's memcache. +type Cache struct { + appengine.Context +} + +// cacheKey modifies an httpcache key for use in memcache. Specifically, it +// prefixes keys to avoid collision with other data stored in memcache. +func cacheKey(key string) string { + return "httpcache:" + key +} + +// Get returns the response corresponding to key if present. +func (c *Cache) Get(key string) (resp []byte, ok bool) { + item, err := memcache.Get(c.Context, cacheKey(key)) + if err != nil { + if err != memcache.ErrCacheMiss { + c.Context.Errorf("error getting cached response: %v", err) + } + return nil, false + } + return item.Value, true +} + +// Set saves a response to the cache as key. +func (c *Cache) Set(key string, resp []byte) { + item := &memcache.Item{ + Key: cacheKey(key), + Value: resp, + } + if err := memcache.Set(c.Context, item); err != nil { + c.Context.Errorf("error caching response: %v", err) + } +} + +// Delete removes the response with key from the cache. +func (c *Cache) Delete(key string) { + if err := memcache.Delete(c.Context, cacheKey(key)); err != nil { + c.Context.Errorf("error deleting cached response: %v", err) + } +} + +// New returns a new Cache for the given context. 
+func New(ctx appengine.Context) *Cache { + return &Cache{ctx} +} diff --git a/vendor/github.com/gregjones/httpcache/memcache/appengine_test.go b/vendor/github.com/gregjones/httpcache/memcache/appengine_test.go new file mode 100644 index 000000000..818b2776e --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/memcache/appengine_test.go @@ -0,0 +1,44 @@ +// +build appengine + +package memcache + +import ( + "bytes" + "testing" + + "appengine/aetest" +) + +func TestAppEngine(t *testing.T) { + ctx, err := aetest.NewContext(nil) + if err != nil { + t.Fatal(err) + } + defer ctx.Close() + + cache := New(ctx) + + key := "testKey" + _, ok := cache.Get(key) + if ok { + t.Fatal("retrieved key before adding it") + } + + val := []byte("some bytes") + cache.Set(key, val) + + retVal, ok := cache.Get(key) + if !ok { + t.Fatal("could not retrieve an element we just added") + } + if !bytes.Equal(retVal, val) { + t.Fatal("retrieved a different value than what we put in") + } + + cache.Delete(key) + + _, ok = cache.Get(key) + if ok { + t.Fatal("deleted key still present") + } +} diff --git a/vendor/github.com/gregjones/httpcache/memcache/memcache.go b/vendor/github.com/gregjones/httpcache/memcache/memcache.go new file mode 100644 index 000000000..462f0e541 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/memcache/memcache.go @@ -0,0 +1,60 @@ +// +build !appengine + +// Package memcache provides an implementation of httpcache.Cache that uses +// gomemcache to store cached responses. +// +// When built for Google App Engine, this package will provide an +// implementation that uses App Engine's memcache service. See the +// appengine.go file in this package for details. +package memcache + +import ( + "github.com/bradfitz/gomemcache/memcache" +) + +// Cache is an implementation of httpcache.Cache that caches responses in a +// memcache server. +type Cache struct { + *memcache.Client +} + +// cacheKey modifies an httpcache key for use in memcache. Specifically, it +// prefixes keys to avoid collision with other data stored in memcache. +func cacheKey(key string) string { + return "httpcache:" + key +} + +// Get returns the response corresponding to key if present. +func (c *Cache) Get(key string) (resp []byte, ok bool) { + item, err := c.Client.Get(cacheKey(key)) + if err != nil { + return nil, false + } + return item.Value, true +} + +// Set saves a response to the cache as key. +func (c *Cache) Set(key string, resp []byte) { + item := &memcache.Item{ + Key: cacheKey(key), + Value: resp, + } + c.Client.Set(item) +} + +// Delete removes the response with key from the cache. +func (c *Cache) Delete(key string) { + c.Client.Delete(cacheKey(key)) +} + +// New returns a new Cache using the provided memcache server(s) with equal +// weight. If a server is listed multiple times, it gets a proportional amount +// of weight. +func New(server ...string) *Cache { + return NewWithClient(memcache.New(server...)) +} + +// NewWithClient returns a new Cache with the given memcache client. 
+func NewWithClient(client *memcache.Client) *Cache { + return &Cache{client} +} diff --git a/vendor/github.com/gregjones/httpcache/memcache/memcache_test.go b/vendor/github.com/gregjones/httpcache/memcache/memcache_test.go new file mode 100644 index 000000000..4dcc547c6 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/memcache/memcache_test.go @@ -0,0 +1,47 @@ +// +build !appengine + +package memcache + +import ( + "bytes" + "net" + "testing" +) + +const testServer = "localhost:11211" + +func TestMemCache(t *testing.T) { + conn, err := net.Dial("tcp", testServer) + if err != nil { + // TODO: rather than skip the test, fall back to a faked memcached server + t.Skipf("skipping test; no server running at %s", testServer) + } + conn.Write([]byte("flush_all\r\n")) // flush memcache + conn.Close() + + cache := New(testServer) + + key := "testKey" + _, ok := cache.Get(key) + if ok { + t.Fatal("retrieved key before adding it") + } + + val := []byte("some bytes") + cache.Set(key, val) + + retVal, ok := cache.Get(key) + if !ok { + t.Fatal("could not retrieve an element we just added") + } + if !bytes.Equal(retVal, val) { + t.Fatal("retrieved a different value than what we put in") + } + + cache.Delete(key) + + _, ok = cache.Get(key) + if ok { + t.Fatal("deleted key still present") + } +} diff --git a/vendor/github.com/gregjones/httpcache/redis/redis.go b/vendor/github.com/gregjones/httpcache/redis/redis.go new file mode 100644 index 000000000..3143d4438 --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/redis/redis.go @@ -0,0 +1,43 @@ +// Package redis provides a redis interface for http caching. +package redis + +import ( + "github.com/garyburd/redigo/redis" + "github.com/gregjones/httpcache" +) + +// cache is an implementation of httpcache.Cache that caches responses in a +// redis server. +type cache struct { + redis.Conn +} + +// cacheKey modifies an httpcache key for use in redis. Specifically, it +// prefixes keys to avoid collision with other data stored in redis. +func cacheKey(key string) string { + return "rediscache:" + key +} + +// Get returns the response corresponding to key if present. +func (c cache) Get(key string) (resp []byte, ok bool) { + item, err := redis.Bytes(c.Do("GET", cacheKey(key))) + if err != nil { + return nil, false + } + return item, true +} + +// Set saves a response to the cache as key. +func (c cache) Set(key string, resp []byte) { + c.Do("SET", cacheKey(key), resp) +} + +// Delete removes the response with key from the cache. +func (c cache) Delete(key string) { + c.Do("DEL", cacheKey(key)) +} + +// NewWithClient returns a new Cache with the given redis connection. 
+func NewWithClient(client redis.Conn) httpcache.Cache { + return cache{client} +} diff --git a/vendor/github.com/gregjones/httpcache/redis/redis_test.go b/vendor/github.com/gregjones/httpcache/redis/redis_test.go new file mode 100644 index 000000000..72f6f619a --- /dev/null +++ b/vendor/github.com/gregjones/httpcache/redis/redis_test.go @@ -0,0 +1,43 @@ +package redis + +import ( + "bytes" + "testing" + + "github.com/garyburd/redigo/redis" +) + +func TestRedisCache(t *testing.T) { + conn, err := redis.Dial("tcp", "localhost:6379") + if err != nil { + // TODO: rather than skip the test, fall back to a faked redis server + t.Skipf("skipping test; no server running at localhost:6379") + } + conn.Do("FLUSHALL") + + cache := NewWithClient(conn) + + key := "testKey" + _, ok := cache.Get(key) + if ok { + t.Fatal("retrieved key before adding it") + } + + val := []byte("some bytes") + cache.Set(key, val) + + retVal, ok := cache.Get(key) + if !ok { + t.Fatal("could not retrieve an element we just added") + } + if !bytes.Equal(retVal, val) { + t.Fatal("retrieved a different value than what we put in") + } + + cache.Delete(key) + + _, ok = cache.Get(key) + if ok { + t.Fatal("deleted key still present") + } +} diff --git a/vendor/github.com/hashicorp/hcl/.github/ISSUE_TEMPLATE.md b/vendor/github.com/hashicorp/hcl/.github/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..2d7fc4bf6 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,21 @@ +### HCL Template +```hcl +# Place your HCL configuration file here +``` + +### Expected behavior +What should have happened? + +### Actual behavior +What actually happened? + +### Steps to reproduce +1. +2. +3. + +### References +Are there any other GitHub issues (open or closed) that should +be linked here? For example: +- GH-1234 +- ... diff --git a/vendor/github.com/hashicorp/hcl/.gitignore b/vendor/github.com/hashicorp/hcl/.gitignore new file mode 100644 index 000000000..15586a2b5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/.gitignore @@ -0,0 +1,9 @@ +y.output + +# ignore intellij files +.idea +*.iml +*.ipr +*.iws + +*.test diff --git a/vendor/github.com/hashicorp/hcl/.travis.yml b/vendor/github.com/hashicorp/hcl/.travis.yml new file mode 100644 index 000000000..cb63a3216 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/.travis.yml @@ -0,0 +1,13 @@ +sudo: false + +language: go + +go: + - 1.x + - tip + +branches: + only: + - master + +script: make test diff --git a/vendor/github.com/hashicorp/hcl/LICENSE b/vendor/github.com/hashicorp/hcl/LICENSE new file mode 100644 index 000000000..c33dcc7c9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/LICENSE @@ -0,0 +1,354 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. 
that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. 
for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. + + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. 
Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. + Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. 
Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. 
Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. + diff --git a/vendor/github.com/hashicorp/hcl/Makefile b/vendor/github.com/hashicorp/hcl/Makefile new file mode 100644 index 000000000..84fd743f5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/Makefile @@ -0,0 +1,18 @@ +TEST?=./... + +default: test + +fmt: generate + go fmt ./... + +test: generate + go get -t ./... + go test $(TEST) $(TESTARGS) + +generate: + go generate ./... + +updatedeps: + go get -u golang.org/x/tools/cmd/stringer + +.PHONY: default generate test updatedeps diff --git a/vendor/github.com/hashicorp/hcl/README.md b/vendor/github.com/hashicorp/hcl/README.md new file mode 100644 index 000000000..c8223326d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/README.md @@ -0,0 +1,125 @@ +# HCL + +[![GoDoc](https://godoc.org/github.com/hashicorp/hcl?status.png)](https://godoc.org/github.com/hashicorp/hcl) [![Build Status](https://travis-ci.org/hashicorp/hcl.svg?branch=master)](https://travis-ci.org/hashicorp/hcl) + +HCL (HashiCorp Configuration Language) is a configuration language built +by HashiCorp. The goal of HCL is to build a structured configuration language +that is both human and machine friendly for use with command-line tools, but +specifically targeted towards DevOps tools, servers, etc. + +HCL is also fully JSON compatible. That is, JSON can be used as completely +valid input to a system expecting HCL. This helps makes systems +interoperable with other systems. + +HCL is heavily inspired by +[libucl](https://github.com/vstakhov/libucl), +nginx configuration, and others similar. + +## Why? + +A common question when viewing HCL is to ask the question: why not +JSON, YAML, etc.? + +Prior to HCL, the tools we built at [HashiCorp](http://www.hashicorp.com) +used a variety of configuration languages from full programming languages +such as Ruby to complete data structure languages such as JSON. What we +learned is that some people wanted human-friendly configuration languages +and some people wanted machine-friendly languages. + +JSON fits a nice balance in this, but is fairly verbose and most +importantly doesn't support comments. With YAML, we found that beginners +had a really hard time determining what the actual structure was, and +ended up guessing more often than not whether to use a hyphen, colon, etc. +in order to represent some configuration key. + +Full programming languages such as Ruby enable complex behavior +a configuration language shouldn't usually allow, and also forces +people to learn some set of Ruby. 
+ +Because of this, we decided to create our own configuration language +that is JSON-compatible. Our configuration language (HCL) is designed +to be written and modified by humans. The API for HCL allows JSON +as an input so that it is also machine-friendly (machines can generate +JSON instead of trying to generate HCL). + +Our goal with HCL is not to alienate other configuration languages. +It is instead to provide HCL as a specialized language for our tools, +and JSON as the interoperability layer. + +## Syntax + +For a complete grammar, please see the parser itself. A high-level overview +of the syntax and grammar is listed here. + + * Single line comments start with `#` or `//` + + * Multi-line comments are wrapped in `/*` and `*/`. Nested block comments + are not allowed. A multi-line comment (also known as a block comment) + terminates at the first `*/` found. + + * Values are assigned with the syntax `key = value` (whitespace doesn't + matter). The value can be any primitive: a string, number, boolean, + object, or list. + + * Strings are double-quoted and can contain any UTF-8 characters. + Example: `"Hello, World"` + + * Multi-line strings start with `<- + echo %Path% + + go version + + go env + + go get -t ./... + +build_script: +- cmd: go test -v ./... diff --git a/vendor/github.com/hashicorp/hcl/decoder.go b/vendor/github.com/hashicorp/hcl/decoder.go new file mode 100644 index 000000000..bed9ebbe1 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/decoder.go @@ -0,0 +1,729 @@ +package hcl + +import ( + "errors" + "fmt" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/parser" + "github.com/hashicorp/hcl/hcl/token" +) + +// This is the tag to use with structures to have settings for HCL +const tagName = "hcl" + +var ( + // nodeType holds a reference to the type of ast.Node + nodeType reflect.Type = findNodeType() +) + +// Unmarshal accepts a byte slice as input and writes the +// data to the value pointed to by v. +func Unmarshal(bs []byte, v interface{}) error { + root, err := parse(bs) + if err != nil { + return err + } + + return DecodeObject(v, root) +} + +// Decode reads the given input and decodes it into the structure +// given by `out`. +func Decode(out interface{}, in string) error { + obj, err := Parse(in) + if err != nil { + return err + } + + return DecodeObject(out, obj) +} + +// DecodeObject is a lower-level version of Decode. It decodes a +// raw Object into the given output. +func DecodeObject(out interface{}, n ast.Node) error { + val := reflect.ValueOf(out) + if val.Kind() != reflect.Ptr { + return errors.New("result must be a pointer") + } + + // If we have the file, we really decode the root node + if f, ok := n.(*ast.File); ok { + n = f.Node + } + + var d decoder + return d.decode("root", n, val.Elem()) +} + +type decoder struct { + stack []reflect.Kind +} + +func (d *decoder) decode(name string, node ast.Node, result reflect.Value) error { + k := result + + // If we have an interface with a valid value, we use that + // for the check. + if result.Kind() == reflect.Interface { + elem := result.Elem() + if elem.IsValid() { + k = elem + } + } + + // Push current onto stack unless it is an interface. 
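+	// The kind pushed here is read back by decodeInterface, which uses the top of
+	// the stack to decide whether an object should become a map[string]interface{}
+	// (at the root or directly inside a slice) or a list of maps.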
+ if k.Kind() != reflect.Interface { + d.stack = append(d.stack, k.Kind()) + + // Schedule a pop + defer func() { + d.stack = d.stack[:len(d.stack)-1] + }() + } + + switch k.Kind() { + case reflect.Bool: + return d.decodeBool(name, node, result) + case reflect.Float32, reflect.Float64: + return d.decodeFloat(name, node, result) + case reflect.Int, reflect.Int32, reflect.Int64: + return d.decodeInt(name, node, result) + case reflect.Interface: + // When we see an interface, we make our own thing + return d.decodeInterface(name, node, result) + case reflect.Map: + return d.decodeMap(name, node, result) + case reflect.Ptr: + return d.decodePtr(name, node, result) + case reflect.Slice: + return d.decodeSlice(name, node, result) + case reflect.String: + return d.decodeString(name, node, result) + case reflect.Struct: + return d.decodeStruct(name, node, result) + default: + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown kind to decode into: %s", name, k.Kind()), + } + } +} + +func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + if n.Token.Type == token.BOOL { + v, err := strconv.ParseBool(n.Token.Text) + if err != nil { + return err + } + + result.Set(reflect.ValueOf(v)) + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type %T", name, node), + } +} + +func (d *decoder) decodeFloat(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + if n.Token.Type == token.FLOAT || n.Token.Type == token.NUMBER { + v, err := strconv.ParseFloat(n.Token.Text, 64) + if err != nil { + return err + } + + result.Set(reflect.ValueOf(v).Convert(result.Type())) + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type %T", name, node), + } +} + +func (d *decoder) decodeInt(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + switch n.Token.Type { + case token.NUMBER: + v, err := strconv.ParseInt(n.Token.Text, 0, 0) + if err != nil { + return err + } + + if result.Kind() == reflect.Interface { + result.Set(reflect.ValueOf(int(v))) + } else { + result.SetInt(v) + } + return nil + case token.STRING: + v, err := strconv.ParseInt(n.Token.Value().(string), 0, 0) + if err != nil { + return err + } + + if result.Kind() == reflect.Interface { + result.Set(reflect.ValueOf(int(v))) + } else { + result.SetInt(v) + } + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type %T", name, node), + } +} + +func (d *decoder) decodeInterface(name string, node ast.Node, result reflect.Value) error { + // When we see an ast.Node, we retain the value to enable deferred decoding. + // Very useful in situations where we want to preserve ast.Node information + // like Pos + if result.Type() == nodeType && result.CanSet() { + result.Set(reflect.ValueOf(node)) + return nil + } + + var set reflect.Value + redecode := true + + // For testing types, ObjectType should just be treated as a list. We + // set this to a temporary var because we want to pass in the real node. + testNode := node + if ot, ok := node.(*ast.ObjectType); ok { + testNode = ot.List + } + + switch n := testNode.(type) { + case *ast.ObjectList: + // If we're at the root or we're directly within a slice, then we + // decode objects into map[string]interface{}, otherwise we decode + // them into lists. 
+ if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice { + var temp map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeMap( + reflect.MapOf( + reflect.TypeOf(""), + tempVal.Type().Elem())) + + set = result + } else { + var temp []map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeSlice( + reflect.SliceOf(tempVal.Type().Elem()), 0, len(n.Items)) + set = result + } + case *ast.ObjectType: + // If we're at the root or we're directly within a slice, then we + // decode objects into map[string]interface{}, otherwise we decode + // them into lists. + if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice { + var temp map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeMap( + reflect.MapOf( + reflect.TypeOf(""), + tempVal.Type().Elem())) + + set = result + } else { + var temp []map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeSlice( + reflect.SliceOf(tempVal.Type().Elem()), 0, 1) + set = result + } + case *ast.ListType: + var temp []interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeSlice( + reflect.SliceOf(tempVal.Type().Elem()), 0, 0) + set = result + case *ast.LiteralType: + switch n.Token.Type { + case token.BOOL: + var result bool + set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) + case token.FLOAT: + var result float64 + set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) + case token.NUMBER: + var result int + set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) + case token.STRING, token.HEREDOC: + set = reflect.Indirect(reflect.New(reflect.TypeOf(""))) + default: + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: cannot decode into interface: %T", name, node), + } + } + default: + return fmt.Errorf( + "%s: cannot decode into interface: %T", + name, node) + } + + // Set the result to what its supposed to be, then reset + // result so we don't reflect into this method anymore. + result.Set(set) + + if redecode { + // Revisit the node so that we can use the newly instantiated + // thing and populate it. + if err := d.decode(name, node, result); err != nil { + return err + } + } + + return nil +} + +func (d *decoder) decodeMap(name string, node ast.Node, result reflect.Value) error { + if item, ok := node.(*ast.ObjectItem); ok { + node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} + } + + if ot, ok := node.(*ast.ObjectType); ok { + node = ot.List + } + + n, ok := node.(*ast.ObjectList) + if !ok { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: not an object type for map (%T)", name, node), + } + } + + // If we have an interface, then we can address the interface, + // but not the slice itself, so get the element but set the interface + set := result + if result.Kind() == reflect.Interface { + result = result.Elem() + } + + resultType := result.Type() + resultElemType := resultType.Elem() + resultKeyType := resultType.Key() + if resultKeyType.Kind() != reflect.String { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: map must have string keys", name), + } + } + + // Make a map if it is nil + resultMap := result + if result.IsNil() { + resultMap = reflect.MakeMap( + reflect.MapOf(resultKeyType, resultElemType)) + } + + // Go through each element and decode it. 
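+	// done records first-level keys that have already been collapsed together via
+	// Filter below, so later items that repeat the same key are skipped rather
+	// than decoded twice.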
+ done := make(map[string]struct{}) + for _, item := range n.Items { + if item.Val == nil { + continue + } + + // github.com/hashicorp/terraform/issue/5740 + if len(item.Keys) == 0 { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: map must have string keys", name), + } + } + + // Get the key we're dealing with, which is the first item + keyStr := item.Keys[0].Token.Value().(string) + + // If we've already processed this key, then ignore it + if _, ok := done[keyStr]; ok { + continue + } + + // Determine the value. If we have more than one key, then we + // get the objectlist of only these keys. + itemVal := item.Val + if len(item.Keys) > 1 { + itemVal = n.Filter(keyStr) + done[keyStr] = struct{}{} + } + + // Make the field name + fieldName := fmt.Sprintf("%s.%s", name, keyStr) + + // Get the key/value as reflection values + key := reflect.ValueOf(keyStr) + val := reflect.Indirect(reflect.New(resultElemType)) + + // If we have a pre-existing value in the map, use that + oldVal := resultMap.MapIndex(key) + if oldVal.IsValid() { + val.Set(oldVal) + } + + // Decode! + if err := d.decode(fieldName, itemVal, val); err != nil { + return err + } + + // Set the value on the map + resultMap.SetMapIndex(key, val) + } + + // Set the final map if we can + set.Set(resultMap) + return nil +} + +func (d *decoder) decodePtr(name string, node ast.Node, result reflect.Value) error { + // Create an element of the concrete (non pointer) type and decode + // into that. Then set the value of the pointer to this type. + resultType := result.Type() + resultElemType := resultType.Elem() + val := reflect.New(resultElemType) + if err := d.decode(name, node, reflect.Indirect(val)); err != nil { + return err + } + + result.Set(val) + return nil +} + +func (d *decoder) decodeSlice(name string, node ast.Node, result reflect.Value) error { + // If we have an interface, then we can address the interface, + // but not the slice itself, so get the element but set the interface + set := result + if result.Kind() == reflect.Interface { + result = result.Elem() + } + // Create the slice if it isn't nil + resultType := result.Type() + resultElemType := resultType.Elem() + if result.IsNil() { + resultSliceType := reflect.SliceOf(resultElemType) + result = reflect.MakeSlice( + resultSliceType, 0, 0) + } + + // Figure out the items we'll be copying into the slice + var items []ast.Node + switch n := node.(type) { + case *ast.ObjectList: + items = make([]ast.Node, len(n.Items)) + for i, item := range n.Items { + items[i] = item + } + case *ast.ObjectType: + items = []ast.Node{n} + case *ast.ListType: + items = n.List + default: + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("unknown slice type: %T", node), + } + } + + for i, item := range items { + fieldName := fmt.Sprintf("%s[%d]", name, i) + + // Decode + val := reflect.Indirect(reflect.New(resultElemType)) + + // if item is an object that was decoded from ambiguous JSON and + // flattened, make sure it's expanded if it needs to decode into a + // defined structure. + item := expandObject(item, val) + + if err := d.decode(fieldName, item, val); err != nil { + return err + } + + // Append it onto the slice + result = reflect.Append(result, val) + } + + set.Set(result) + return nil +} + +// expandObject detects if an ambiguous JSON object was flattened to a List which +// should be decoded into a struct, and expands the ast to properly deocode. 
+func expandObject(node ast.Node, result reflect.Value) ast.Node { + item, ok := node.(*ast.ObjectItem) + if !ok { + return node + } + + elemType := result.Type() + + // our target type must be a struct + switch elemType.Kind() { + case reflect.Ptr: + switch elemType.Elem().Kind() { + case reflect.Struct: + //OK + default: + return node + } + case reflect.Struct: + //OK + default: + return node + } + + // A list value will have a key and field name. If it had more fields, + // it wouldn't have been flattened. + if len(item.Keys) != 2 { + return node + } + + keyToken := item.Keys[0].Token + item.Keys = item.Keys[1:] + + // we need to un-flatten the ast enough to decode + newNode := &ast.ObjectItem{ + Keys: []*ast.ObjectKey{ + &ast.ObjectKey{ + Token: keyToken, + }, + }, + Val: &ast.ObjectType{ + List: &ast.ObjectList{ + Items: []*ast.ObjectItem{item}, + }, + }, + } + + return newNode +} + +func (d *decoder) decodeString(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + switch n.Token.Type { + case token.NUMBER: + result.Set(reflect.ValueOf(n.Token.Text).Convert(result.Type())) + return nil + case token.STRING, token.HEREDOC: + result.Set(reflect.ValueOf(n.Token.Value()).Convert(result.Type())) + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type for string %T", name, node), + } +} + +func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) error { + var item *ast.ObjectItem + if it, ok := node.(*ast.ObjectItem); ok { + item = it + node = it.Val + } + + if ot, ok := node.(*ast.ObjectType); ok { + node = ot.List + } + + // Handle the special case where the object itself is a literal. Previously + // the yacc parser would always ensure top-level elements were arrays. The new + // parser does not make the same guarantees, thus we need to convert any + // top-level literal elements into a list. + if _, ok := node.(*ast.LiteralType); ok && item != nil { + node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} + } + + list, ok := node.(*ast.ObjectList) + if !ok { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: not an object type for struct (%T)", name, node), + } + } + + // This slice will keep track of all the structs we'll be decoding. + // There can be more than one struct if there are embedded structs + // that are squashed. + structs := make([]reflect.Value, 1, 5) + structs[0] = result + + // Compile the list of all the fields that we're going to be decoding + // from all the structs. + type field struct { + field reflect.StructField + val reflect.Value + } + fields := []field{} + for len(structs) > 0 { + structVal := structs[0] + structs = structs[1:] + + structType := structVal.Type() + for i := 0; i < structType.NumField(); i++ { + fieldType := structType.Field(i) + tagParts := strings.Split(fieldType.Tag.Get(tagName), ",") + + // Ignore fields with tag name "-" + if tagParts[0] == "-" { + continue + } + + if fieldType.Anonymous { + fieldKind := fieldType.Type.Kind() + if fieldKind != reflect.Struct { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unsupported type to struct: %s", + fieldType.Name, fieldKind), + } + } + + // We have an embedded field. We "squash" the fields down + // if specified in the tag. 
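+				// Squashing (tag form `hcl:",squash"`) decodes the embedded struct's
+				// fields as if they were declared directly on the outer struct.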
+ squash := false + for _, tag := range tagParts[1:] { + if tag == "squash" { + squash = true + break + } + } + + if squash { + structs = append( + structs, result.FieldByName(fieldType.Name)) + continue + } + } + + // Normal struct field, store it away + fields = append(fields, field{fieldType, structVal.Field(i)}) + } + } + + usedKeys := make(map[string]struct{}) + decodedFields := make([]string, 0, len(fields)) + decodedFieldsVal := make([]reflect.Value, 0) + unusedKeysVal := make([]reflect.Value, 0) + for _, f := range fields { + field, fieldValue := f.field, f.val + if !fieldValue.IsValid() { + // This should never happen + panic("field is not valid") + } + + // If we can't set the field, then it is unexported or something, + // and we just continue onwards. + if !fieldValue.CanSet() { + continue + } + + fieldName := field.Name + + tagValue := field.Tag.Get(tagName) + tagParts := strings.SplitN(tagValue, ",", 2) + if len(tagParts) >= 2 { + switch tagParts[1] { + case "decodedFields": + decodedFieldsVal = append(decodedFieldsVal, fieldValue) + continue + case "key": + if item == nil { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: %s asked for 'key', impossible", + name, fieldName), + } + } + + fieldValue.SetString(item.Keys[0].Token.Value().(string)) + continue + case "unusedKeys": + unusedKeysVal = append(unusedKeysVal, fieldValue) + continue + } + } + + if tagParts[0] != "" { + fieldName = tagParts[0] + } + + // Determine the element we'll use to decode. If it is a single + // match (only object with the field), then we decode it exactly. + // If it is a prefix match, then we decode the matches. + filter := list.Filter(fieldName) + + prefixMatches := filter.Children() + matches := filter.Elem() + if len(matches.Items) == 0 && len(prefixMatches.Items) == 0 { + continue + } + + // Track the used key + usedKeys[fieldName] = struct{}{} + + // Create the field name and decode. We range over the elements + // because we actually want the value. 
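+		// Prefix matches (blocks nested under this field name) are decoded as one
+		// group, while exact matches are decoded item by item so repeated blocks
+		// accumulate into the same field.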
+ fieldName = fmt.Sprintf("%s.%s", name, fieldName) + if len(prefixMatches.Items) > 0 { + if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil { + return err + } + } + for _, match := range matches.Items { + var decodeNode ast.Node = match.Val + if ot, ok := decodeNode.(*ast.ObjectType); ok { + decodeNode = &ast.ObjectList{Items: ot.List.Items} + } + + if err := d.decode(fieldName, decodeNode, fieldValue); err != nil { + return err + } + } + + decodedFields = append(decodedFields, field.Name) + } + + if len(decodedFieldsVal) > 0 { + // Sort it so that it is deterministic + sort.Strings(decodedFields) + + for _, v := range decodedFieldsVal { + v.Set(reflect.ValueOf(decodedFields)) + } + } + + return nil +} + +// findNodeType returns the type of ast.Node +func findNodeType() reflect.Type { + var nodeContainer struct { + Node ast.Node + } + value := reflect.ValueOf(nodeContainer).FieldByName("Node") + return value.Type() +} diff --git a/vendor/github.com/hashicorp/hcl/decoder_test.go b/vendor/github.com/hashicorp/hcl/decoder_test.go new file mode 100644 index 000000000..8682f470e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/decoder_test.go @@ -0,0 +1,1203 @@ +package hcl + +import ( + "io/ioutil" + "path/filepath" + "reflect" + "testing" + "time" + + "github.com/davecgh/go-spew/spew" + "github.com/hashicorp/hcl/hcl/ast" +) + +func TestDecode_interface(t *testing.T) { + cases := []struct { + File string + Err bool + Out interface{} + }{ + { + "basic.hcl", + false, + map[string]interface{}{ + "foo": "bar", + "bar": "${file(\"bing/bong.txt\")}", + }, + }, + { + "basic_squish.hcl", + false, + map[string]interface{}{ + "foo": "bar", + "bar": "${file(\"bing/bong.txt\")}", + "foo-bar": "baz", + }, + }, + { + "empty.hcl", + false, + map[string]interface{}{ + "resource": []map[string]interface{}{ + map[string]interface{}{ + "foo": []map[string]interface{}{ + map[string]interface{}{}, + }, + }, + }, + }, + }, + { + "tfvars.hcl", + false, + map[string]interface{}{ + "regularvar": "Should work", + "map.key1": "Value", + "map.key2": "Other value", + }, + }, + { + "escape.hcl", + false, + map[string]interface{}{ + "foo": "bar\"baz\\n", + "qux": "back\\slash", + "bar": "new\nline", + "qax": `slash\:colon`, + "nested": `${HH\\:mm\\:ss}`, + "nestedquotes": `${"\"stringwrappedinquotes\""}`, + }, + }, + { + "float.hcl", + false, + map[string]interface{}{ + "a": 1.02, + "b": 2, + }, + }, + { + "multiline_bad.hcl", + true, + nil, + }, + { + "multiline_literal.hcl", + true, + nil, + }, + { + "multiline_literal_with_hil.hcl", + false, + map[string]interface{}{"multiline_literal_with_hil": "${hello\n world}"}, + }, + { + "multiline_no_marker.hcl", + true, + nil, + }, + { + "multiline.hcl", + false, + map[string]interface{}{"foo": "bar\nbaz\n"}, + }, + { + "multiline_indented.hcl", + false, + map[string]interface{}{"foo": " bar\n baz\n"}, + }, + { + "multiline_no_hanging_indent.hcl", + false, + map[string]interface{}{"foo": " baz\n bar\n foo\n"}, + }, + { + "multiline_no_eof.hcl", + false, + map[string]interface{}{"foo": "bar\nbaz\n", "key": "value"}, + }, + { + "multiline.json", + false, + map[string]interface{}{"foo": "bar\nbaz"}, + }, + { + "null_strings.json", + false, + map[string]interface{}{ + "module": []map[string]interface{}{ + map[string]interface{}{ + "app": []map[string]interface{}{ + map[string]interface{}{"foo": ""}, + }, + }, + }, + }, + }, + { + "scientific.json", + false, + map[string]interface{}{ + "a": 1e-10, + "b": 1e+10, + "c": 1e10, + "d": 1.2e-10, + "e": 1.2e+10, + "f": 
1.2e10, + }, + }, + { + "scientific.hcl", + false, + map[string]interface{}{ + "a": 1e-10, + "b": 1e+10, + "c": 1e10, + "d": 1.2e-10, + "e": 1.2e+10, + "f": 1.2e10, + }, + }, + { + "terraform_heroku.hcl", + false, + map[string]interface{}{ + "name": "terraform-test-app", + "config_vars": []map[string]interface{}{ + map[string]interface{}{ + "FOO": "bar", + }, + }, + }, + }, + { + "structure_multi.hcl", + false, + map[string]interface{}{ + "foo": []map[string]interface{}{ + map[string]interface{}{ + "baz": []map[string]interface{}{ + map[string]interface{}{"key": 7}, + }, + }, + map[string]interface{}{ + "bar": []map[string]interface{}{ + map[string]interface{}{"key": 12}, + }, + }, + }, + }, + }, + { + "structure_multi.json", + false, + map[string]interface{}{ + "foo": []map[string]interface{}{ + map[string]interface{}{ + "baz": []map[string]interface{}{ + map[string]interface{}{"key": 7}, + }, + }, + map[string]interface{}{ + "bar": []map[string]interface{}{ + map[string]interface{}{"key": 12}, + }, + }, + }, + }, + }, + { + "list_of_lists.hcl", + false, + map[string]interface{}{ + "foo": []interface{}{ + []interface{}{"foo"}, + []interface{}{"bar"}, + }, + }, + }, + { + "list_of_maps.hcl", + false, + map[string]interface{}{ + "foo": []interface{}{ + map[string]interface{}{"somekey1": "someval1"}, + map[string]interface{}{"somekey2": "someval2", "someextrakey": "someextraval"}, + }, + }, + }, + { + "assign_deep.hcl", + false, + map[string]interface{}{ + "resource": []interface{}{ + map[string]interface{}{ + "foo": []interface{}{ + map[string]interface{}{ + "bar": []map[string]interface{}{ + map[string]interface{}{}}}}}}}, + }, + { + "structure_list.hcl", + false, + map[string]interface{}{ + "foo": []map[string]interface{}{ + map[string]interface{}{ + "key": 7, + }, + map[string]interface{}{ + "key": 12, + }, + }, + }, + }, + { + "structure_list.json", + false, + map[string]interface{}{ + "foo": []map[string]interface{}{ + map[string]interface{}{ + "key": 7, + }, + map[string]interface{}{ + "key": 12, + }, + }, + }, + }, + { + "structure_list_deep.json", + false, + map[string]interface{}{ + "bar": []map[string]interface{}{ + map[string]interface{}{ + "foo": []map[string]interface{}{ + map[string]interface{}{ + "name": "terraform_example", + "ingress": []map[string]interface{}{ + map[string]interface{}{ + "from_port": 22, + }, + map[string]interface{}{ + "from_port": 80, + }, + }, + }, + }, + }, + }, + }, + }, + + { + "structure_list_empty.json", + false, + map[string]interface{}{ + "foo": []interface{}{}, + }, + }, + + { + "nested_block_comment.hcl", + false, + map[string]interface{}{ + "bar": "value", + }, + }, + + { + "unterminated_block_comment.hcl", + true, + nil, + }, + + { + "unterminated_brace.hcl", + true, + nil, + }, + + { + "nested_provider_bad.hcl", + true, + nil, + }, + + { + "object_list.json", + false, + map[string]interface{}{ + "resource": []map[string]interface{}{ + map[string]interface{}{ + "aws_instance": []map[string]interface{}{ + map[string]interface{}{ + "db": []map[string]interface{}{ + map[string]interface{}{ + "vpc": "foo", + "provisioner": []map[string]interface{}{ + map[string]interface{}{ + "file": []map[string]interface{}{ + map[string]interface{}{ + "source": "foo", + "destination": "bar", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + + // Terraform GH-8295 sanity test that basic decoding into + // interface{} works. 
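+		// The same fixture is expected to fail when decoded into a typed struct
+		// map; see TestDecode_structureMapInvalid below.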
+ { + "terraform_variable_invalid.json", + false, + map[string]interface{}{ + "variable": []map[string]interface{}{ + map[string]interface{}{ + "whatever": "abc123", + }, + }, + }, + }, + + { + "interpolate.json", + false, + map[string]interface{}{ + "default": `${replace("europe-west", "-", " ")}`, + }, + }, + + { + "block_assign.hcl", + true, + nil, + }, + + { + "escape_backslash.hcl", + false, + map[string]interface{}{ + "output": []map[string]interface{}{ + map[string]interface{}{ + "one": `${replace(var.sub_domain, ".", "\\.")}`, + "two": `${replace(var.sub_domain, ".", "\\\\.")}`, + "many": `${replace(var.sub_domain, ".", "\\\\\\\\.")}`, + }, + }, + }, + }, + + { + "git_crypt.hcl", + true, + nil, + }, + + { + "object_with_bool.hcl", + false, + map[string]interface{}{ + "path": []map[string]interface{}{ + map[string]interface{}{ + "policy": "write", + "permissions": []map[string]interface{}{ + map[string]interface{}{ + "bool": []interface{}{false}, + }, + }, + }, + }, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.File, func(t *testing.T) { + d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.File)) + if err != nil { + t.Fatalf("err: %s", err) + } + + var out interface{} + err = Decode(&out, string(d)) + if (err != nil) != tc.Err { + t.Fatalf("Input: %s\n\nError: %s", tc.File, err) + } + + if !reflect.DeepEqual(out, tc.Out) { + t.Fatalf("Input: %s. Actual, Expected.\n\n%#v\n\n%#v", tc.File, out, tc.Out) + } + + var v interface{} + err = Unmarshal(d, &v) + if (err != nil) != tc.Err { + t.Fatalf("Input: %s\n\nError: %s", tc.File, err) + } + + if !reflect.DeepEqual(v, tc.Out) { + t.Fatalf("Input: %s. Actual, Expected.\n\n%#v\n\n%#v", tc.File, out, tc.Out) + } + }) + } +} + +func TestDecode_interfaceInline(t *testing.T) { + cases := []struct { + Value string + Err bool + Out interface{} + }{ + {"t t e{{}}", true, nil}, + {"t=0t d {}", true, map[string]interface{}{"t": 0}}, + {"v=0E0v d{}", true, map[string]interface{}{"v": float64(0)}}, + } + + for _, tc := range cases { + t.Logf("Testing: %q", tc.Value) + + var out interface{} + err := Decode(&out, tc.Value) + if (err != nil) != tc.Err { + t.Fatalf("Input: %q\n\nError: %s", tc.Value, err) + } + + if !reflect.DeepEqual(out, tc.Out) { + t.Fatalf("Input: %q. Actual, Expected.\n\n%#v\n\n%#v", tc.Value, out, tc.Out) + } + + var v interface{} + err = Unmarshal([]byte(tc.Value), &v) + if (err != nil) != tc.Err { + t.Fatalf("Input: %q\n\nError: %s", tc.Value, err) + } + + if !reflect.DeepEqual(v, tc.Out) { + t.Fatalf("Input: %q. 
Actual, Expected.\n\n%#v\n\n%#v", tc.Value, out, tc.Out) + } + } +} + +func TestDecode_equal(t *testing.T) { + cases := []struct { + One, Two string + }{ + { + "basic.hcl", + "basic.json", + }, + { + "float.hcl", + "float.json", + }, + /* + { + "structure.hcl", + "structure.json", + }, + */ + { + "structure.hcl", + "structure_flat.json", + }, + { + "terraform_heroku.hcl", + "terraform_heroku.json", + }, + } + + for _, tc := range cases { + p1 := filepath.Join(fixtureDir, tc.One) + p2 := filepath.Join(fixtureDir, tc.Two) + + d1, err := ioutil.ReadFile(p1) + if err != nil { + t.Fatalf("err: %s", err) + } + + d2, err := ioutil.ReadFile(p2) + if err != nil { + t.Fatalf("err: %s", err) + } + + var i1, i2 interface{} + err = Decode(&i1, string(d1)) + if err != nil { + t.Fatalf("err: %s", err) + } + + err = Decode(&i2, string(d2)) + if err != nil { + t.Fatalf("err: %s", err) + } + + if !reflect.DeepEqual(i1, i2) { + t.Fatalf( + "%s != %s\n\n%#v\n\n%#v", + tc.One, tc.Two, + i1, i2) + } + } +} + +func TestDecode_flatMap(t *testing.T) { + var val map[string]map[string]string + + err := Decode(&val, testReadFile(t, "structure_flatmap.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + expected := map[string]map[string]string{ + "foo": map[string]string{ + "foo": "bar", + "key": "7", + }, + } + + if !reflect.DeepEqual(val, expected) { + t.Fatalf("Actual: %#v\n\nExpected: %#v", val, expected) + } +} + +func TestDecode_structure(t *testing.T) { + type Embedded interface{} + + type V struct { + Embedded `hcl:"-"` + Key int + Foo string + } + + var actual V + + err := Decode(&actual, testReadFile(t, "flat.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + expected := V{ + Key: 7, + Foo: "bar", + } + + if !reflect.DeepEqual(actual, expected) { + t.Fatalf("Actual: %#v\n\nExpected: %#v", actual, expected) + } +} + +func TestDecode_structurePtr(t *testing.T) { + type V struct { + Key int + Foo string + } + + var actual *V + + err := Decode(&actual, testReadFile(t, "flat.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + expected := &V{ + Key: 7, + Foo: "bar", + } + + if !reflect.DeepEqual(actual, expected) { + t.Fatalf("Actual: %#v\n\nExpected: %#v", actual, expected) + } +} + +func TestDecode_structureArray(t *testing.T) { + // This test is extracted from a failure in Consul (consul.io), + // hence the interesting structure naming. 
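+	// It exercises the ",key" and "key,expand" struct tags: each block's label is
+	// captured into Prefix, and the repeated blocks expand into the Keys slice.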
+ + type KeyPolicyType string + + type KeyPolicy struct { + Prefix string `hcl:",key"` + Policy KeyPolicyType + } + + type Policy struct { + Keys []KeyPolicy `hcl:"key,expand"` + } + + expected := Policy{ + Keys: []KeyPolicy{ + KeyPolicy{ + Prefix: "", + Policy: "read", + }, + KeyPolicy{ + Prefix: "foo/", + Policy: "write", + }, + KeyPolicy{ + Prefix: "foo/bar/", + Policy: "read", + }, + KeyPolicy{ + Prefix: "foo/bar/baz", + Policy: "deny", + }, + }, + } + + files := []string{ + "decode_policy.hcl", + "decode_policy.json", + } + + for _, f := range files { + var actual Policy + + err := Decode(&actual, testReadFile(t, f)) + if err != nil { + t.Fatalf("Input: %s\n\nerr: %s", f, err) + } + + if !reflect.DeepEqual(actual, expected) { + t.Fatalf("Input: %s\n\nActual: %#v\n\nExpected: %#v", f, actual, expected) + } + } +} + +func TestDecode_sliceExpand(t *testing.T) { + type testInner struct { + Name string `hcl:",key"` + Key string + } + + type testStruct struct { + Services []testInner `hcl:"service,expand"` + } + + expected := testStruct{ + Services: []testInner{ + testInner{ + Name: "my-service-0", + Key: "value", + }, + testInner{ + Name: "my-service-1", + Key: "value", + }, + }, + } + + files := []string{ + "slice_expand.hcl", + } + + for _, f := range files { + t.Logf("Testing: %s", f) + + var actual testStruct + err := Decode(&actual, testReadFile(t, f)) + if err != nil { + t.Fatalf("Input: %s\n\nerr: %s", f, err) + } + + if !reflect.DeepEqual(actual, expected) { + t.Fatalf("Input: %s\n\nActual: %#v\n\nExpected: %#v", f, actual, expected) + } + } +} + +func TestDecode_structureMap(t *testing.T) { + // This test is extracted from a failure in Terraform (terraform.io), + // hence the interesting structure naming. + + type hclVariable struct { + Default interface{} + Description string + Fields []string `hcl:",decodedFields"` + } + + type rawConfig struct { + Variable map[string]hclVariable + } + + expected := rawConfig{ + Variable: map[string]hclVariable{ + "foo": hclVariable{ + Default: "bar", + Description: "bar", + Fields: []string{"Default", "Description"}, + }, + + "amis": hclVariable{ + Default: []map[string]interface{}{ + map[string]interface{}{ + "east": "foo", + }, + }, + Fields: []string{"Default"}, + }, + }, + } + + files := []string{ + "decode_tf_variable.hcl", + "decode_tf_variable.json", + } + + for _, f := range files { + t.Logf("Testing: %s", f) + + var actual rawConfig + err := Decode(&actual, testReadFile(t, f)) + if err != nil { + t.Fatalf("Input: %s\n\nerr: %s", f, err) + } + + if !reflect.DeepEqual(actual, expected) { + t.Fatalf("Input: %s\n\nActual: %#v\n\nExpected: %#v", f, actual, expected) + } + } +} + +func TestDecode_structureMapInvalid(t *testing.T) { + // Terraform GH-8295 + + type hclVariable struct { + Default interface{} + Description string + Fields []string `hcl:",decodedFields"` + } + + type rawConfig struct { + Variable map[string]*hclVariable + } + + var actual rawConfig + err := Decode(&actual, testReadFile(t, "terraform_variable_invalid.json")) + if err == nil { + t.Fatal("expected error") + } +} + +func TestDecode_interfaceNonPointer(t *testing.T) { + var value interface{} + err := Decode(value, testReadFile(t, "basic_int_string.hcl")) + if err == nil { + t.Fatal("should error") + } +} + +func TestDecode_intString(t *testing.T) { + var value struct { + Count int + } + + err := Decode(&value, testReadFile(t, "basic_int_string.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if value.Count != 3 { + t.Fatalf("bad: %#v", value.Count) + } +} + 
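+// A minimal usage sketch added for illustration (not part of the upstream test
+// suite): Decode reads HCL text into a tagged struct. The Config type and its
+// field names are hypothetical.
+func TestDecode_basicUsageSketch(t *testing.T) {
+	type Config struct {
+		Name    string `hcl:"name"`
+		Workers int    `hcl:"workers"`
+	}
+
+	input := "name = \"web\"\nworkers = 4"
+
+	var c Config
+	if err := Decode(&c, input); err != nil {
+		t.Fatalf("err: %s", err)
+	}
+	if c.Name != "web" || c.Workers != 4 {
+		t.Fatalf("bad: %#v", c)
+	}
+}
+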
+func TestDecode_float32(t *testing.T) { + var value struct { + A float32 `hcl:"a"` + B float32 `hcl:"b"` + } + + err := Decode(&value, testReadFile(t, "float.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if got, want := value.A, float32(1.02); got != want { + t.Fatalf("wrong result %#v; want %#v", got, want) + } + if got, want := value.B, float32(2); got != want { + t.Fatalf("wrong result %#v; want %#v", got, want) + } +} + +func TestDecode_float64(t *testing.T) { + var value struct { + A float64 `hcl:"a"` + B float64 `hcl:"b"` + } + + err := Decode(&value, testReadFile(t, "float.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if got, want := value.A, float64(1.02); got != want { + t.Fatalf("wrong result %#v; want %#v", got, want) + } + if got, want := value.B, float64(2); got != want { + t.Fatalf("wrong result %#v; want %#v", got, want) + } +} + +func TestDecode_intStringAliased(t *testing.T) { + var value struct { + Count time.Duration + } + + err := Decode(&value, testReadFile(t, "basic_int_string.hcl")) + if err != nil { + t.Fatalf("err: %s", err) + } + + if value.Count != time.Duration(3) { + t.Fatalf("bad: %#v", value.Count) + } +} + +func TestDecode_Node(t *testing.T) { + // given + var value struct { + Content ast.Node + Nested struct { + Content ast.Node + } + } + + content := ` +content { + hello = "world" +} +` + + // when + err := Decode(&value, content) + + // then + if err != nil { + t.Errorf("unable to decode content, %v", err) + return + } + + // verify ast.Node can be decoded later + var v map[string]interface{} + err = DecodeObject(&v, value.Content) + if err != nil { + t.Errorf("unable to decode content, %v", err) + return + } + + if v["hello"] != "world" { + t.Errorf("expected mapping to be returned") + } +} + +func TestDecode_NestedNode(t *testing.T) { + // given + var value struct { + Nested struct { + Content ast.Node + } + } + + content := ` +nested "content" { + hello = "world" +} +` + + // when + err := Decode(&value, content) + + // then + if err != nil { + t.Errorf("unable to decode content, %v", err) + return + } + + // verify ast.Node can be decoded later + var v map[string]interface{} + err = DecodeObject(&v, value.Nested.Content) + if err != nil { + t.Errorf("unable to decode content, %v", err) + return + } + + if v["hello"] != "world" { + t.Errorf("expected mapping to be returned") + } +} + +// https://github.com/hashicorp/hcl/issues/60 +func TestDecode_topLevelKeys(t *testing.T) { + type Template struct { + Source string + } + + templates := struct { + Templates []*Template `hcl:"template"` + }{} + + err := Decode(&templates, ` + template { + source = "blah" + } + + template { + source = "blahblah" + }`) + + if err != nil { + t.Fatal(err) + } + + if templates.Templates[0].Source != "blah" { + t.Errorf("bad source: %s", templates.Templates[0].Source) + } + + if templates.Templates[1].Source != "blahblah" { + t.Errorf("bad source: %s", templates.Templates[1].Source) + } +} + +func TestDecode_flattenedJSON(t *testing.T) { + // make sure we can also correctly extract a Name key too + type V struct { + Name string `hcl:",key"` + Description string + Default map[string]string + } + type Vars struct { + Variable []*V + } + + cases := []struct { + JSON string + Out interface{} + Expected interface{} + }{ + { // Nested object, no sibling keys + JSON: ` +{ + "var_name": { + "default": { + "key1": "a", + "key2": "b" + } + } +} + `, + Out: &[]*V{}, + Expected: &[]*V{ + &V{ + Name: "var_name", + Default: map[string]string{"key1": "a", "key2": 
"b"}, + }, + }, + }, + + { // Nested object with a sibling key (this worked previously) + JSON: ` +{ + "var_name": { + "description": "Described", + "default": { + "key1": "a", + "key2": "b" + } + } +} + `, + Out: &[]*V{}, + Expected: &[]*V{ + &V{ + Name: "var_name", + Description: "Described", + Default: map[string]string{"key1": "a", "key2": "b"}, + }, + }, + }, + + { // Multiple nested objects, one with a sibling key + JSON: ` +{ + "variable": { + "var_1": { + "default": { + "key1": "a", + "key2": "b" + } + }, + "var_2": { + "description": "Described", + "default": { + "key1": "a", + "key2": "b" + } + } + } +} + `, + Out: &Vars{}, + Expected: &Vars{ + Variable: []*V{ + &V{ + Name: "var_1", + Default: map[string]string{"key1": "a", "key2": "b"}, + }, + &V{ + Name: "var_2", + Description: "Described", + Default: map[string]string{"key1": "a", "key2": "b"}, + }, + }, + }, + }, + + { // Nested object to maps + JSON: ` +{ + "variable": { + "var_name": { + "description": "Described", + "default": { + "key1": "a", + "key2": "b" + } + } + } +} + `, + Out: &[]map[string]interface{}{}, + Expected: &[]map[string]interface{}{ + { + "variable": []map[string]interface{}{ + { + "var_name": []map[string]interface{}{ + { + "description": "Described", + "default": []map[string]interface{}{ + { + "key1": "a", + "key2": "b", + }, + }, + }, + }, + }, + }, + }, + }, + }, + + { // Nested object to maps without a sibling key should decode the same as above + JSON: ` +{ + "variable": { + "var_name": { + "default": { + "key1": "a", + "key2": "b" + } + } + } +} + `, + Out: &[]map[string]interface{}{}, + Expected: &[]map[string]interface{}{ + { + "variable": []map[string]interface{}{ + { + "var_name": []map[string]interface{}{ + { + "default": []map[string]interface{}{ + { + "key1": "a", + "key2": "b", + }, + }, + }, + }, + }, + }, + }, + }, + }, + + { // Nested objects, one with a sibling key, and one without + JSON: ` +{ + "variable": { + "var_1": { + "default": { + "key1": "a", + "key2": "b" + } + }, + "var_2": { + "description": "Described", + "default": { + "key1": "a", + "key2": "b" + } + } + } +} + `, + Out: &[]map[string]interface{}{}, + Expected: &[]map[string]interface{}{ + { + "variable": []map[string]interface{}{ + { + "var_1": []map[string]interface{}{ + { + "default": []map[string]interface{}{ + { + "key1": "a", + "key2": "b", + }, + }, + }, + }, + }, + }, + }, + { + "variable": []map[string]interface{}{ + { + "var_2": []map[string]interface{}{ + { + "description": "Described", + "default": []map[string]interface{}{ + { + "key1": "a", + "key2": "b", + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + for i, tc := range cases { + err := Decode(tc.Out, tc.JSON) + if err != nil { + t.Fatalf("[%d] err: %s", i, err) + } + + if !reflect.DeepEqual(tc.Out, tc.Expected) { + t.Fatalf("[%d]\ngot: %s\nexpected: %s\n", i, spew.Sdump(tc.Out), spew.Sdump(tc.Expected)) + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/hcl.go b/vendor/github.com/hashicorp/hcl/hcl.go new file mode 100644 index 000000000..575a20b50 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl.go @@ -0,0 +1,11 @@ +// Package hcl decodes HCL into usable Go structures. +// +// hcl input can come in either pure HCL format or JSON format. +// It can be parsed into an AST, and then decoded into a structure, +// or it can be decoded directly from a string into a structure. +// +// If you choose to parse HCL into a raw AST, the benefit is that you +// can write custom visitor implementations to implement custom +// semantic checks. 
By default, HCL does not perform any semantic +// checks. +package hcl diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go new file mode 100644 index 000000000..6e5ef654b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go @@ -0,0 +1,219 @@ +// Package ast declares the types used to represent syntax trees for HCL +// (HashiCorp Configuration Language) +package ast + +import ( + "fmt" + "strings" + + "github.com/hashicorp/hcl/hcl/token" +) + +// Node is an element in the abstract syntax tree. +type Node interface { + node() + Pos() token.Pos +} + +func (File) node() {} +func (ObjectList) node() {} +func (ObjectKey) node() {} +func (ObjectItem) node() {} +func (Comment) node() {} +func (CommentGroup) node() {} +func (ObjectType) node() {} +func (LiteralType) node() {} +func (ListType) node() {} + +// File represents a single HCL file +type File struct { + Node Node // usually a *ObjectList + Comments []*CommentGroup // list of all comments in the source +} + +func (f *File) Pos() token.Pos { + return f.Node.Pos() +} + +// ObjectList represents a list of ObjectItems. An HCL file itself is an +// ObjectList. +type ObjectList struct { + Items []*ObjectItem +} + +func (o *ObjectList) Add(item *ObjectItem) { + o.Items = append(o.Items, item) +} + +// Filter filters out the objects with the given key list as a prefix. +// +// The returned list of objects contain ObjectItems where the keys have +// this prefix already stripped off. This might result in objects with +// zero-length key lists if they have no children. +// +// If no matches are found, an empty ObjectList (non-nil) is returned. +func (o *ObjectList) Filter(keys ...string) *ObjectList { + var result ObjectList + for _, item := range o.Items { + // If there aren't enough keys, then ignore this + if len(item.Keys) < len(keys) { + continue + } + + match := true + for i, key := range item.Keys[:len(keys)] { + key := key.Token.Value().(string) + if key != keys[i] && !strings.EqualFold(key, keys[i]) { + match = false + break + } + } + if !match { + continue + } + + // Strip off the prefix from the children + newItem := *item + newItem.Keys = newItem.Keys[len(keys):] + result.Add(&newItem) + } + + return &result +} + +// Children returns further nested objects (key length > 0) within this +// ObjectList. This should be used with Filter to get at child items. +func (o *ObjectList) Children() *ObjectList { + var result ObjectList + for _, item := range o.Items { + if len(item.Keys) > 0 { + result.Add(item) + } + } + + return &result +} + +// Elem returns items in the list that are direct element assignments +// (key length == 0). This should be used with Filter to get at elements. +func (o *ObjectList) Elem() *ObjectList { + var result ObjectList + for _, item := range o.Items { + if len(item.Keys) == 0 { + result.Add(item) + } + } + + return &result +} + +func (o *ObjectList) Pos() token.Pos { + // always returns the uninitiliazed position + return o.Items[0].Pos() +} + +// ObjectItem represents a HCL Object Item. An item is represented with a key +// (or keys). It can be an assignment or an object (both normal and nested) +type ObjectItem struct { + // keys is only one length long if it's of type assignment. If it's a + // nested object it can be larger than one. In that case "assign" is + // invalid as there is no assignments for a nested object. + Keys []*ObjectKey + + // assign contains the position of "=", if any + Assign token.Pos + + // val is the item itself. 
It can be an object,list, number, bool or a + // string. If key length is larger than one, val can be only of type + // Object. + Val Node + + LeadComment *CommentGroup // associated lead comment + LineComment *CommentGroup // associated line comment +} + +func (o *ObjectItem) Pos() token.Pos { + // I'm not entirely sure what causes this, but removing this causes + // a test failure. We should investigate at some point. + if len(o.Keys) == 0 { + return token.Pos{} + } + + return o.Keys[0].Pos() +} + +// ObjectKeys are either an identifier or of type string. +type ObjectKey struct { + Token token.Token +} + +func (o *ObjectKey) Pos() token.Pos { + return o.Token.Pos +} + +// LiteralType represents a literal of basic type. Valid types are: +// token.NUMBER, token.FLOAT, token.BOOL and token.STRING +type LiteralType struct { + Token token.Token + + // comment types, only used when in a list + LeadComment *CommentGroup + LineComment *CommentGroup +} + +func (l *LiteralType) Pos() token.Pos { + return l.Token.Pos +} + +// ListStatement represents a HCL List type +type ListType struct { + Lbrack token.Pos // position of "[" + Rbrack token.Pos // position of "]" + List []Node // the elements in lexical order +} + +func (l *ListType) Pos() token.Pos { + return l.Lbrack +} + +func (l *ListType) Add(node Node) { + l.List = append(l.List, node) +} + +// ObjectType represents a HCL Object Type +type ObjectType struct { + Lbrace token.Pos // position of "{" + Rbrace token.Pos // position of "}" + List *ObjectList // the nodes in lexical order +} + +func (o *ObjectType) Pos() token.Pos { + return o.Lbrace +} + +// Comment node represents a single //, # style or /*- style commment +type Comment struct { + Start token.Pos // position of / or # + Text string +} + +func (c *Comment) Pos() token.Pos { + return c.Start +} + +// CommentGroup node represents a sequence of comments with no other tokens and +// no empty lines between. 
+type CommentGroup struct { + List []*Comment // len(List) > 0 +} + +func (c *CommentGroup) Pos() token.Pos { + return c.List[0].Pos() +} + +//------------------------------------------------------------------- +// GoStringer +//------------------------------------------------------------------- + +func (o *ObjectKey) GoString() string { return fmt.Sprintf("*%#v", *o) } +func (o *ObjectList) GoString() string { return fmt.Sprintf("*%#v", *o) } diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go new file mode 100644 index 000000000..942256cad --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/ast/ast_test.go @@ -0,0 +1,200 @@ +package ast + +import ( + "reflect" + "strings" + "testing" + + "github.com/hashicorp/hcl/hcl/token" +) + +func TestObjectListFilter(t *testing.T) { + var cases = []struct { + Filter []string + Input []*ObjectItem + Output []*ObjectItem + }{ + { + []string{"foo"}, + []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{ + Token: token.Token{Type: token.STRING, Text: `"foo"`}, + }, + }, + }, + }, + []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{}, + }, + }, + }, + + { + []string{"foo"}, + []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"foo"`}}, + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"bar"`}}, + }, + }, + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"baz"`}}, + }, + }, + }, + []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"bar"`}}, + }, + }, + }, + }, + } + + for _, tc := range cases { + input := &ObjectList{Items: tc.Input} + expected := &ObjectList{Items: tc.Output} + if actual := input.Filter(tc.Filter...); !reflect.DeepEqual(actual, expected) { + t.Fatalf("in order: input, expected, actual\n\n%#v\n\n%#v\n\n%#v", input, expected, actual) + } + } +} + +func TestWalk(t *testing.T) { + items := []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"foo"`}}, + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"bar"`}}, + }, + Val: &LiteralType{Token: token.Token{Type: token.STRING, Text: `"example"`}}, + }, + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"baz"`}}, + }, + }, + } + + node := &ObjectList{Items: items} + + order := []string{ + "*ast.ObjectList", + "*ast.ObjectItem", + "*ast.ObjectKey", + "*ast.ObjectKey", + "*ast.LiteralType", + "*ast.ObjectItem", + "*ast.ObjectKey", + } + count := 0 + + Walk(node, func(n Node) (Node, bool) { + if n == nil { + return n, false + } + + typeName := reflect.TypeOf(n).String() + if order[count] != typeName { + t.Errorf("expected '%s' got: '%s'", order[count], typeName) + } + count++ + return n, true + }) +} + +func TestWalkEquality(t *testing.T) { + items := []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"foo"`}}, + }, + }, + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"bar"`}}, + }, + }, + } + + node := &ObjectList{Items: items} + + rewritten := Walk(node, func(n Node) (Node, bool) { return n, true }) + + newNode, ok := rewritten.(*ObjectList) + if !ok { + t.Fatalf("expected Objectlist, got %T", rewritten) + } + + if !reflect.DeepEqual(node, newNode) { + t.Fatal("rewritten node is not equal to the given node") + } + + if 
len(newNode.Items) != 2 { + t.Error("expected newNode length 2, got: %d", len(newNode.Items)) + } + + expected := []string{ + `"foo"`, + `"bar"`, + } + + for i, item := range newNode.Items { + if len(item.Keys) != 1 { + t.Error("expected keys newNode length 1, got: %d", len(item.Keys)) + } + + if item.Keys[0].Token.Text != expected[i] { + t.Errorf("expected key %s, got %s", expected[i], item.Keys[0].Token.Text) + } + + if item.Val != nil { + t.Errorf("expected item value should be nil") + } + } +} + +func TestWalkRewrite(t *testing.T) { + items := []*ObjectItem{ + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"foo"`}}, + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"bar"`}}, + }, + }, + &ObjectItem{ + Keys: []*ObjectKey{ + &ObjectKey{Token: token.Token{Type: token.STRING, Text: `"baz"`}}, + }, + }, + } + + node := &ObjectList{Items: items} + + suffix := "_example" + node = Walk(node, func(n Node) (Node, bool) { + switch i := n.(type) { + case *ObjectKey: + i.Token.Text = i.Token.Text + suffix + n = i + } + return n, true + }).(*ObjectList) + + Walk(node, func(n Node) (Node, bool) { + switch i := n.(type) { + case *ObjectKey: + if !strings.HasSuffix(i.Token.Text, suffix) { + t.Errorf("Token '%s' should have suffix: %s", i.Token.Text, suffix) + } + } + return n, true + }) + +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go new file mode 100644 index 000000000..ba07ad42b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go @@ -0,0 +1,52 @@ +package ast + +import "fmt" + +// WalkFunc describes a function to be called for each node during a Walk. The +// returned node can be used to rewrite the AST. Walking stops the returned +// bool is false. +type WalkFunc func(Node) (Node, bool) + +// Walk traverses an AST in depth-first order: It starts by calling fn(node); +// node must not be nil. If fn returns true, Walk invokes fn recursively for +// each of the non-nil children of node, followed by a call of fn(nil). The +// returned node of fn can be used to rewrite the passed node to fn. +func Walk(node Node, fn WalkFunc) Node { + rewritten, ok := fn(node) + if !ok { + return rewritten + } + + switch n := node.(type) { + case *File: + n.Node = Walk(n.Node, fn) + case *ObjectList: + for i, item := range n.Items { + n.Items[i] = Walk(item, fn).(*ObjectItem) + } + case *ObjectKey: + // nothing to do + case *ObjectItem: + for i, k := range n.Keys { + n.Keys[i] = Walk(k, fn).(*ObjectKey) + } + + if n.Val != nil { + n.Val = Walk(n.Val, fn) + } + case *LiteralType: + // nothing to do + case *ListType: + for i, l := range n.List { + n.List[i] = Walk(l, fn) + } + case *ObjectType: + n.List = Walk(n.List, fn).(*ObjectList) + default: + // should we panic here? 
+ fmt.Printf("unknown type: %T\n", n) + } + + fn(nil) + return rewritten +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd.go b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd.go new file mode 100644 index 000000000..2380d71e3 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd.go @@ -0,0 +1,162 @@ +// Derivative work from: +// - https://golang.org/src/cmd/gofmt/gofmt.go +// - https://github.com/fatih/hclfmt + +package fmtcmd + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "strings" + + "github.com/hashicorp/hcl/hcl/printer" +) + +var ( + ErrWriteStdin = errors.New("cannot use write option with standard input") +) + +type Options struct { + List bool // list files whose formatting differs + Write bool // write result to (source) file instead of stdout + Diff bool // display diffs of formatting changes +} + +func isValidFile(f os.FileInfo, extensions []string) bool { + if !f.IsDir() && !strings.HasPrefix(f.Name(), ".") { + for _, ext := range extensions { + if strings.HasSuffix(f.Name(), "."+ext) { + return true + } + } + } + + return false +} + +// If in == nil, the source is the contents of the file with the given filename. +func processFile(filename string, in io.Reader, out io.Writer, stdin bool, opts Options) error { + if in == nil { + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + in = f + } + + src, err := ioutil.ReadAll(in) + if err != nil { + return err + } + + res, err := printer.Format(src) + if err != nil { + return fmt.Errorf("In %s: %s", filename, err) + } + + if !bytes.Equal(src, res) { + // formatting has changed + if opts.List { + fmt.Fprintln(out, filename) + } + if opts.Write { + err = ioutil.WriteFile(filename, res, 0644) + if err != nil { + return err + } + } + if opts.Diff { + data, err := diff(src, res) + if err != nil { + return fmt.Errorf("computing diff: %s", err) + } + fmt.Fprintf(out, "diff a/%s b/%s\n", filename, filename) + out.Write(data) + } + } + + if !opts.List && !opts.Write && !opts.Diff { + _, err = out.Write(res) + } + + return err +} + +func walkDir(path string, extensions []string, stdout io.Writer, opts Options) error { + visitFile := func(path string, f os.FileInfo, err error) error { + if err == nil && isValidFile(f, extensions) { + err = processFile(path, nil, stdout, false, opts) + } + return err + } + + return filepath.Walk(path, visitFile) +} + +func Run( + paths, extensions []string, + stdin io.Reader, + stdout io.Writer, + opts Options, +) error { + if len(paths) == 0 { + if opts.Write { + return ErrWriteStdin + } + if err := processFile("", stdin, stdout, true, opts); err != nil { + return err + } + return nil + } + + for _, path := range paths { + switch dir, err := os.Stat(path); { + case err != nil: + return err + case dir.IsDir(): + if err := walkDir(path, extensions, stdout, opts); err != nil { + return err + } + default: + if err := processFile(path, nil, stdout, false, opts); err != nil { + return err + } + } + } + + return nil +} + +func diff(b1, b2 []byte) (data []byte, err error) { + f1, err := ioutil.TempFile("", "") + if err != nil { + return + } + defer os.Remove(f1.Name()) + defer f1.Close() + + f2, err := ioutil.TempFile("", "") + if err != nil { + return + } + defer os.Remove(f2.Name()) + defer f2.Close() + + f1.Write(b1) + f2.Write(b2) + + data, err = exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. 
+ // Ignore that failure as long as we get output. + err = nil + } + return +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go new file mode 100644 index 000000000..b952d76d8 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/fmtcmd_test.go @@ -0,0 +1,440 @@ +// +build !windows +// TODO(jen20): These need fixing on Windows but fmt is not used right now +// and red CI is making it harder to process other bugs, so ignore until +// we get around to fixing them. + +package fmtcmd + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "reflect" + "regexp" + "sort" + "syscall" + "testing" + + "github.com/hashicorp/hcl/testhelper" +) + +var fixtureExtensions = []string{"hcl"} + +func init() { + sort.Sort(ByFilename(fixtures)) +} + +func TestIsValidFile(t *testing.T) { + const fixtureDir = "./test-fixtures" + + cases := []struct { + Path string + Expected bool + }{ + {"good.hcl", true}, + {".hidden.ignore", false}, + {"file.ignore", false}, + {"dir.ignore", false}, + } + + for _, tc := range cases { + file, err := os.Stat(filepath.Join(fixtureDir, tc.Path)) + if err != nil { + t.Errorf("unexpected error: %s", err) + } + + if res := isValidFile(file, fixtureExtensions); res != tc.Expected { + t.Errorf("want: %b, got: %b", tc.Expected, res) + } + } +} + +func TestRunMultiplePaths(t *testing.T) { + path1, err := renderFixtures("") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path1) + path2, err := renderFixtures("") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path2) + + var expectedOut bytes.Buffer + for _, path := range []string{path1, path2} { + for _, fixture := range fixtures { + if !bytes.Equal(fixture.golden, fixture.input) { + expectedOut.WriteString(filepath.Join(path, fixture.filename) + "\n") + } + } + } + + _, stdout := mockIO() + err = Run( + []string{path1, path2}, + fixtureExtensions, + nil, stdout, + Options{ + List: true, + }, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if stdout.String() != expectedOut.String() { + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + } +} + +func TestRunSubDirectories(t *testing.T) { + pathParent, err := ioutil.TempDir("", "") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(pathParent) + + path1, err := renderFixtures(pathParent) + if err != nil { + t.Errorf("unexpected error: %s", err) + } + path2, err := renderFixtures(pathParent) + if err != nil { + t.Errorf("unexpected error: %s", err) + } + + paths := []string{path1, path2} + sort.Strings(paths) + + var expectedOut bytes.Buffer + for _, path := range paths { + for _, fixture := range fixtures { + if !bytes.Equal(fixture.golden, fixture.input) { + expectedOut.WriteString(filepath.Join(path, fixture.filename) + "\n") + } + } + } + + _, stdout := mockIO() + err = Run( + []string{pathParent}, + fixtureExtensions, + nil, stdout, + Options{ + List: true, + }, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if stdout.String() != expectedOut.String() { + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + } +} + +func TestRunStdin(t *testing.T) { + var expectedOut bytes.Buffer + for i, fixture := range fixtures { + if i != 0 { + expectedOut.WriteString("\n") + } + expectedOut.Write(fixture.golden) + } + + stdin, stdout := mockIO() + for _, fixture := range fixtures { + stdin.Write(fixture.input) + } + + err := Run( + 
[]string{}, + fixtureExtensions, + stdin, stdout, + Options{}, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if !bytes.Equal(stdout.Bytes(), expectedOut.Bytes()) { + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + } +} + +func TestRunStdinAndWrite(t *testing.T) { + var expectedOut = []byte{} + + stdin, stdout := mockIO() + stdin.WriteString("") + err := Run( + []string{}, []string{}, + stdin, stdout, + Options{ + Write: true, + }, + ) + + if err != ErrWriteStdin { + t.Errorf("error want:\n%s\ngot:\n%s", ErrWriteStdin, err) + } + if !bytes.Equal(stdout.Bytes(), expectedOut) { + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + } +} + +func TestRunFileError(t *testing.T) { + path, err := ioutil.TempDir("", "") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path) + filename := filepath.Join(path, "unreadable.hcl") + + var expectedError = &os.PathError{ + Op: "open", + Path: filename, + Err: syscall.EACCES, + } + + err = ioutil.WriteFile(filename, []byte{}, 0000) + if err != nil { + t.Errorf("unexpected error: %s", err) + } + + _, stdout := mockIO() + err = Run( + []string{path}, + fixtureExtensions, + nil, stdout, + Options{}, + ) + + if !reflect.DeepEqual(err, expectedError) { + t.Errorf("error want: %#v, got: %#v", expectedError, err) + } +} + +func TestRunNoOptions(t *testing.T) { + path, err := renderFixtures("") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path) + + var expectedOut bytes.Buffer + for _, fixture := range fixtures { + expectedOut.Write(fixture.golden) + } + + _, stdout := mockIO() + err = Run( + []string{path}, + fixtureExtensions, + nil, stdout, + Options{}, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if stdout.String() != expectedOut.String() { + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + } +} + +func TestRunList(t *testing.T) { + path, err := renderFixtures("") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path) + + var expectedOut bytes.Buffer + for _, fixture := range fixtures { + if !bytes.Equal(fixture.golden, fixture.input) { + expectedOut.WriteString(fmt.Sprintln(filepath.Join(path, fixture.filename))) + } + } + + _, stdout := mockIO() + err = Run( + []string{path}, + fixtureExtensions, + nil, stdout, + Options{ + List: true, + }, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if stdout.String() != expectedOut.String() { + t.Errorf("stdout want:\n%s\ngot:\n%s", expectedOut, stdout) + } +} + +func TestRunWrite(t *testing.T) { + path, err := renderFixtures("") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path) + + _, stdout := mockIO() + err = Run( + []string{path}, + fixtureExtensions, + nil, stdout, + Options{ + Write: true, + }, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + for _, fixture := range fixtures { + res, err := ioutil.ReadFile(filepath.Join(path, fixture.filename)) + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if !bytes.Equal(res, fixture.golden) { + t.Errorf("file %q contents want:\n%s\ngot:\n%s", fixture.filename, fixture.golden, res) + } + } +} + +func TestRunDiff(t *testing.T) { + path, err := renderFixtures("") + if err != nil { + t.Errorf("unexpected error: %s", err) + } + defer os.RemoveAll(path) + + var expectedOut bytes.Buffer + for _, fixture := range fixtures { + if len(fixture.diff) > 0 { + expectedOut.WriteString( + regexp.QuoteMeta( + 
fmt.Sprintf("diff a/%s/%s b/%s/%s\n", path, fixture.filename, path, fixture.filename), + ), + ) + // Need to use regex to ignore datetimes in diff. + expectedOut.WriteString(`--- .+?\n`) + expectedOut.WriteString(`\+\+\+ .+?\n`) + expectedOut.WriteString(regexp.QuoteMeta(string(fixture.diff))) + } + } + + expectedOutString := testhelper.Unix2dos(expectedOut.String()) + + _, stdout := mockIO() + err = Run( + []string{path}, + fixtureExtensions, + nil, stdout, + Options{ + Diff: true, + }, + ) + + if err != nil { + t.Errorf("unexpected error: %s", err) + } + if !regexp.MustCompile(expectedOutString).Match(stdout.Bytes()) { + t.Errorf("stdout want match:\n%s\ngot:\n%q", expectedOutString, stdout) + } +} + +func mockIO() (stdin, stdout *bytes.Buffer) { + return new(bytes.Buffer), new(bytes.Buffer) +} + +type fixture struct { + filename string + input, golden, diff []byte +} + +type ByFilename []fixture + +func (s ByFilename) Len() int { return len(s) } +func (s ByFilename) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s ByFilename) Less(i, j int) bool { return len(s[i].filename) > len(s[j].filename) } + +var fixtures = []fixture{ + { + "noop.hcl", + []byte(`resource "aws_security_group" "firewall" { + count = 5 +} +`), + []byte(`resource "aws_security_group" "firewall" { + count = 5 +} +`), + []byte(``), + }, { + "align_equals.hcl", + []byte(`variable "foo" { + default = "bar" + description = "bar" +} +`), + []byte(`variable "foo" { + default = "bar" + description = "bar" +} +`), + []byte(`@@ -1,4 +1,4 @@ + variable "foo" { +- default = "bar" ++ default = "bar" + description = "bar" + } +`), + }, { + "indentation.hcl", + []byte(`provider "aws" { + access_key = "foo" + secret_key = "bar" +} +`), + []byte(`provider "aws" { + access_key = "foo" + secret_key = "bar" +} +`), + []byte(`@@ -1,4 +1,4 @@ + provider "aws" { +- access_key = "foo" +- secret_key = "bar" ++ access_key = "foo" ++ secret_key = "bar" + } +`), + }, +} + +// parent can be an empty string, in which case the system's default +// temporary directory will be used. 
+func renderFixtures(parent string) (path string, err error) { + path, err = ioutil.TempDir(parent, "") + if err != nil { + return "", err + } + + for _, fixture := range fixtures { + err = ioutil.WriteFile(filepath.Join(path, fixture.filename), []byte(fixture.input), 0644) + if err != nil { + os.RemoveAll(path) + return "", err + } + } + + return path, nil +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/.hidden.ignore b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/.hidden.ignore new file mode 100644 index 000000000..9977a2836 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/.hidden.ignore @@ -0,0 +1 @@ +invalid diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/dir.ignore b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/dir.ignore new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/file.ignore b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/file.ignore new file mode 100644 index 000000000..9977a2836 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/file.ignore @@ -0,0 +1 @@ +invalid diff --git a/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/good.hcl b/vendor/github.com/hashicorp/hcl/hcl/fmtcmd/test-fixtures/good.hcl new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go new file mode 100644 index 000000000..5c99381df --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go @@ -0,0 +1,17 @@ +package parser + +import ( + "fmt" + + "github.com/hashicorp/hcl/hcl/token" +) + +// PosError is a parse error that contains a position. +type PosError struct { + Pos token.Pos + Err error +} + +func (e *PosError) Error() string { + return fmt.Sprintf("At %s: %s", e.Pos, e.Err) +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/error_test.go b/vendor/github.com/hashicorp/hcl/hcl/parser/error_test.go new file mode 100644 index 000000000..32399fec5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/error_test.go @@ -0,0 +1,9 @@ +package parser + +import ( + "testing" +) + +func TestPosError_impl(t *testing.T) { + var _ error = new(PosError) +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go new file mode 100644 index 000000000..098e1bc49 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go @@ -0,0 +1,526 @@ +// Package parser implements a parser for HCL (HashiCorp Configuration +// Language) +package parser + +import ( + "bytes" + "errors" + "fmt" + "strings" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/scanner" + "github.com/hashicorp/hcl/hcl/token" +) + +type Parser struct { + sc *scanner.Scanner + + // Last read token + tok token.Token + commaPrev token.Token + + comments []*ast.CommentGroup + leadComment *ast.CommentGroup // last lead comment + lineComment *ast.CommentGroup // last line comment + + enableTrace bool + indent int + n int // buffer size (max = 1) +} + +func newParser(src []byte) *Parser { + return &Parser{ + sc: scanner.New(src), + } +} + +// Parse returns the fully parsed source and returns the abstract syntax tree. 
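
Parse is the low-level entry point the decoder and printer build on; a short sketch of calling it directly and narrowing the result with the ast helpers added earlier in this diff (the service block input and variable names are illustrative):

    package main

    import (
        "fmt"
        "log"

        "github.com/hashicorp/hcl/hcl/ast"
        "github.com/hashicorp/hcl/hcl/parser"
    )

    func main() {
        src := []byte("service \"web\" {\n  port = 8080\n}\n")

        file, err := parser.Parse(src)
        if err != nil {
            log.Fatal(err)
        }

        // The top-level node of a parsed file is an *ast.ObjectList.
        // Filter strips the matched key prefix, so each returned item
        // keeps only its remaining keys (here, the block label).
        list := file.Node.(*ast.ObjectList)
        for _, item := range list.Filter("service").Items {
            fmt.Println("remaining keys:", len(item.Keys)) // 1
        }
    }
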
+func Parse(src []byte) (*ast.File, error) { + // normalize all line endings + // since the scanner and output only work with "\n" line endings, we may + // end up with dangling "\r" characters in the parsed data. + src = bytes.Replace(src, []byte("\r\n"), []byte("\n"), -1) + + p := newParser(src) + return p.Parse() +} + +var errEofToken = errors.New("EOF token found") + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func (p *Parser) Parse() (*ast.File, error) { + f := &ast.File{} + var err, scerr error + p.sc.Error = func(pos token.Pos, msg string) { + scerr = &PosError{Pos: pos, Err: errors.New(msg)} + } + + f.Node, err = p.objectList(false) + if scerr != nil { + return nil, scerr + } + if err != nil { + return nil, err + } + + f.Comments = p.comments + return f, nil +} + +// objectList parses a list of items within an object (generally k/v pairs). +// The parameter" obj" tells this whether to we are within an object (braces: +// '{', '}') or just at the top level. If we're within an object, we end +// at an RBRACE. +func (p *Parser) objectList(obj bool) (*ast.ObjectList, error) { + defer un(trace(p, "ParseObjectList")) + node := &ast.ObjectList{} + + for { + if obj { + tok := p.scan() + p.unscan() + if tok.Type == token.RBRACE { + break + } + } + + n, err := p.objectItem() + if err == errEofToken { + break // we are finished + } + + // we don't return a nil node, because might want to use already + // collected items. + if err != nil { + return node, err + } + + node.Add(n) + + // object lists can be optionally comma-delimited e.g. when a list of maps + // is being expressed, so a comma is allowed here - it's simply consumed + tok := p.scan() + if tok.Type != token.COMMA { + p.unscan() + } + } + return node, nil +} + +func (p *Parser) consumeComment() (comment *ast.Comment, endline int) { + endline = p.tok.Pos.Line + + // count the endline if it's multiline comment, ie starting with /* + if len(p.tok.Text) > 1 && p.tok.Text[1] == '*' { + // don't use range here - no need to decode Unicode code points + for i := 0; i < len(p.tok.Text); i++ { + if p.tok.Text[i] == '\n' { + endline++ + } + } + } + + comment = &ast.Comment{Start: p.tok.Pos, Text: p.tok.Text} + p.tok = p.sc.Scan() + return +} + +func (p *Parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) { + var list []*ast.Comment + endline = p.tok.Pos.Line + + for p.tok.Type == token.COMMENT && p.tok.Pos.Line <= endline+n { + var comment *ast.Comment + comment, endline = p.consumeComment() + list = append(list, comment) + } + + // add comment group to the comments list + comments = &ast.CommentGroup{List: list} + p.comments = append(p.comments, comments) + + return +} + +// objectItem parses a single object item +func (p *Parser) objectItem() (*ast.ObjectItem, error) { + defer un(trace(p, "ParseObjectItem")) + + keys, err := p.objectKey() + if len(keys) > 0 && err == errEofToken { + // We ignore eof token here since it is an error if we didn't + // receive a value (but we did receive a key) for the item. + err = nil + } + if len(keys) > 0 && err != nil && p.tok.Type == token.RBRACE { + // This is a strange boolean statement, but what it means is: + // We have keys with no value, and we're likely in an object + // (since RBrace ends an object). For this, we set err to nil so + // we continue and get the error below of having the wrong value + // type. + err = nil + + // Reset the token type so we don't think it completed fine. 
See + // objectType which uses p.tok.Type to check if we're done with + // the object. + p.tok.Type = token.EOF + } + if err != nil { + return nil, err + } + + o := &ast.ObjectItem{ + Keys: keys, + } + + if p.leadComment != nil { + o.LeadComment = p.leadComment + p.leadComment = nil + } + + switch p.tok.Type { + case token.ASSIGN: + o.Assign = p.tok.Pos + o.Val, err = p.object() + if err != nil { + return nil, err + } + case token.LBRACE: + o.Val, err = p.objectType() + if err != nil { + return nil, err + } + default: + keyStr := make([]string, 0, len(keys)) + for _, k := range keys { + keyStr = append(keyStr, k.Token.Text) + } + + return nil, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf( + "key '%s' expected start of object ('{') or assignment ('=')", + strings.Join(keyStr, " ")), + } + } + + // do a look-ahead for line comment + p.scan() + if len(keys) > 0 && o.Val.Pos().Line == keys[0].Pos().Line && p.lineComment != nil { + o.LineComment = p.lineComment + p.lineComment = nil + } + p.unscan() + return o, nil +} + +// objectKey parses an object key and returns a ObjectKey AST +func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { + keyCount := 0 + keys := make([]*ast.ObjectKey, 0) + + for { + tok := p.scan() + switch tok.Type { + case token.EOF: + // It is very important to also return the keys here as well as + // the error. This is because we need to be able to tell if we + // did parse keys prior to finding the EOF, or if we just found + // a bare EOF. + return keys, errEofToken + case token.ASSIGN: + // assignment or object only, but not nested objects. this is not + // allowed: `foo bar = {}` + if keyCount > 1 { + return nil, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("nested object expected: LBRACE got: %s", p.tok.Type), + } + } + + if keyCount == 0 { + return nil, &PosError{ + Pos: p.tok.Pos, + Err: errors.New("no object keys found!"), + } + } + + return keys, nil + case token.LBRACE: + var err error + + // If we have no keys, then it is a syntax error. i.e. {{}} is not + // allowed. + if len(keys) == 0 { + err = &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("expected: IDENT | STRING got: %s", p.tok.Type), + } + } + + // object + return keys, err + case token.IDENT, token.STRING: + keyCount++ + keys = append(keys, &ast.ObjectKey{Token: p.tok}) + case token.ILLEGAL: + return keys, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("illegal character"), + } + default: + return keys, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("expected: IDENT | STRING | ASSIGN | LBRACE got: %s", p.tok.Type), + } + } + } +} + +// object parses any type of object, such as number, bool, string, object or +// list. 
+func (p *Parser) object() (ast.Node, error) { + defer un(trace(p, "ParseType")) + tok := p.scan() + + switch tok.Type { + case token.NUMBER, token.FLOAT, token.BOOL, token.STRING, token.HEREDOC: + return p.literalType() + case token.LBRACE: + return p.objectType() + case token.LBRACK: + return p.listType() + case token.COMMENT: + // implement comment + case token.EOF: + return nil, errEofToken + } + + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("Unknown token: %+v", tok), + } +} + +// objectType parses an object type and returns a ObjectType AST +func (p *Parser) objectType() (*ast.ObjectType, error) { + defer un(trace(p, "ParseObjectType")) + + // we assume that the currently scanned token is a LBRACE + o := &ast.ObjectType{ + Lbrace: p.tok.Pos, + } + + l, err := p.objectList(true) + + // if we hit RBRACE, we are good to go (means we parsed all Items), if it's + // not a RBRACE, it's an syntax error and we just return it. + if err != nil && p.tok.Type != token.RBRACE { + return nil, err + } + + // No error, scan and expect the ending to be a brace + if tok := p.scan(); tok.Type != token.RBRACE { + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("object expected closing RBRACE got: %s", tok.Type), + } + } + + o.List = l + o.Rbrace = p.tok.Pos // advanced via parseObjectList + return o, nil +} + +// listType parses a list type and returns a ListType AST +func (p *Parser) listType() (*ast.ListType, error) { + defer un(trace(p, "ParseListType")) + + // we assume that the currently scanned token is a LBRACK + l := &ast.ListType{ + Lbrack: p.tok.Pos, + } + + needComma := false + for { + tok := p.scan() + if needComma { + switch tok.Type { + case token.COMMA, token.RBRACK: + default: + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf( + "error parsing list, expected comma or list end, got: %s", + tok.Type), + } + } + } + switch tok.Type { + case token.BOOL, token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC: + node, err := p.literalType() + if err != nil { + return nil, err + } + + // If there is a lead comment, apply it + if p.leadComment != nil { + node.LeadComment = p.leadComment + p.leadComment = nil + } + + l.Add(node) + needComma = true + case token.COMMA: + // get next list item or we are at the end + // do a look-ahead for line comment + p.scan() + if p.lineComment != nil && len(l.List) > 0 { + lit, ok := l.List[len(l.List)-1].(*ast.LiteralType) + if ok { + lit.LineComment = p.lineComment + l.List[len(l.List)-1] = lit + p.lineComment = nil + } + } + p.unscan() + + needComma = false + continue + case token.LBRACE: + // Looks like a nested object, so parse it out + node, err := p.objectType() + if err != nil { + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf( + "error while trying to parse object within list: %s", err), + } + } + l.Add(node) + needComma = true + case token.LBRACK: + node, err := p.listType() + if err != nil { + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf( + "error while trying to parse list within list: %s", err), + } + } + l.Add(node) + case token.RBRACK: + // finished + l.Rbrack = p.tok.Pos + return l, nil + default: + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("unexpected token while parsing list: %s", tok.Type), + } + } + } +} + +// literalType parses a literal type and returns a LiteralType AST +func (p *Parser) literalType() (*ast.LiteralType, error) { + defer un(trace(p, "ParseLiteral")) + + return &ast.LiteralType{ + Token: p.tok, + }, nil +} + +// scan returns the next token from the 
underlying scanner. If a token has +// been unscanned then read that instead. In the process, it collects any +// comment groups encountered, and remembers the last lead and line comments. +func (p *Parser) scan() token.Token { + // If we have a token on the buffer, then return it. + if p.n != 0 { + p.n = 0 + return p.tok + } + + // Otherwise read the next token from the scanner and Save it to the buffer + // in case we unscan later. + prev := p.tok + p.tok = p.sc.Scan() + + if p.tok.Type == token.COMMENT { + var comment *ast.CommentGroup + var endline int + + // fmt.Printf("p.tok.Pos.Line = %+v prev: %d endline %d \n", + // p.tok.Pos.Line, prev.Pos.Line, endline) + if p.tok.Pos.Line == prev.Pos.Line { + // The comment is on same line as the previous token; it + // cannot be a lead comment but may be a line comment. + comment, endline = p.consumeCommentGroup(0) + if p.tok.Pos.Line != endline { + // The next token is on a different line, thus + // the last comment group is a line comment. + p.lineComment = comment + } + } + + // consume successor comments, if any + endline = -1 + for p.tok.Type == token.COMMENT { + comment, endline = p.consumeCommentGroup(1) + } + + if endline+1 == p.tok.Pos.Line && p.tok.Type != token.RBRACE { + switch p.tok.Type { + case token.RBRACE, token.RBRACK: + // Do not count for these cases + default: + // The next token is following on the line immediately after the + // comment group, thus the last comment group is a lead comment. + p.leadComment = comment + } + } + + } + + return p.tok +} + +// unscan pushes the previously read token back onto the buffer. +func (p *Parser) unscan() { + p.n = 1 +} + +// ---------------------------------------------------------------------------- +// Parsing support + +func (p *Parser) printTrace(a ...interface{}) { + if !p.enableTrace { + return + } + + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + const n = len(dots) + fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) + + i := 2 * p.indent + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) +} + +func trace(p *Parser, msg string) *Parser { + p.printTrace(msg, "(") + p.indent++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *Parser) { + p.indent-- + p.printTrace(")") +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser_test.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser_test.go new file mode 100644 index 000000000..270212207 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/parser_test.go @@ -0,0 +1,575 @@ +package parser + +import ( + "fmt" + "io/ioutil" + "path/filepath" + "reflect" + "runtime" + "strings" + "testing" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/token" +) + +func TestType(t *testing.T) { + var literals = []struct { + typ token.Type + src string + }{ + {token.STRING, `foo = "foo"`}, + {token.NUMBER, `foo = 123`}, + {token.NUMBER, `foo = -29`}, + {token.FLOAT, `foo = 123.12`}, + {token.FLOAT, `foo = -123.12`}, + {token.BOOL, `foo = true`}, + {token.HEREDOC, "foo = <= 0 { + result = p.heredocIndent(result) + } + } + + return result +} + +// objectItem returns the printable HCL form of an object item. An object type +// starts with one/multiple keys and has a value. The value might be of any +// type. 
+func (p *printer) objectItem(o *ast.ObjectItem) []byte { + defer un(trace(p, fmt.Sprintf("ObjectItem: %s", o.Keys[0].Token.Text))) + var buf bytes.Buffer + + if o.LeadComment != nil { + for _, comment := range o.LeadComment.List { + buf.WriteString(comment.Text) + buf.WriteByte(newline) + } + } + + for i, k := range o.Keys { + buf.WriteString(k.Token.Text) + buf.WriteByte(blank) + + // reach end of key + if o.Assign.IsValid() && i == len(o.Keys)-1 && len(o.Keys) == 1 { + buf.WriteString("=") + buf.WriteByte(blank) + } + } + + buf.Write(p.output(o.Val)) + + if o.Val.Pos().Line == o.Keys[0].Pos().Line && o.LineComment != nil { + buf.WriteByte(blank) + for _, comment := range o.LineComment.List { + buf.WriteString(comment.Text) + } + } + + return buf.Bytes() +} + +// objectType returns the printable HCL form of an object type. An object type +// begins with a brace and ends with a brace. +func (p *printer) objectType(o *ast.ObjectType) []byte { + defer un(trace(p, "ObjectType")) + var buf bytes.Buffer + buf.WriteString("{") + + var index int + var nextItem token.Pos + var commented, newlinePrinted bool + for { + // Determine the location of the next actual non-comment + // item. If we're at the end, the next item is the closing brace + if index != len(o.List.Items) { + nextItem = o.List.Items[index].Pos() + } else { + nextItem = o.Rbrace + } + + // Go through the standalone comments in the file and print out + // the comments that we should be for this object item. + for _, c := range p.standaloneComments { + printed := false + var lastCommentPos token.Pos + for _, comment := range c.List { + // We only care about comments after the previous item + // we've printed so that comments are printed in the + // correct locations (between two objects for example). + // And before the next item. + if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) { + // If there are standalone comments and the initial newline has not + // been printed yet, do it now. + if !newlinePrinted { + newlinePrinted = true + buf.WriteByte(newline) + } + + // add newline if it's between other printed nodes + if index > 0 { + commented = true + buf.WriteByte(newline) + } + + // Store this position + lastCommentPos = comment.Pos() + + // output the comment itself + buf.Write(p.indent(p.heredocIndent([]byte(comment.Text)))) + + // Set printed to true to note that we printed something + printed = true + + /* + if index != len(o.List.Items) { + buf.WriteByte(newline) // do not print on the end + } + */ + } + } + + // Stuff to do if we had comments + if printed { + // Always write a newline + buf.WriteByte(newline) + + // If there is another item in the object and our comment + // didn't hug it directly, then make sure there is a blank + // line separating them. + if nextItem != o.Rbrace && nextItem.Line != lastCommentPos.Line+1 { + buf.WriteByte(newline) + } + } + } + + if index == len(o.List.Items) { + p.prev = o.Rbrace + break + } + + // At this point we are sure that it's not a totally empty block: print + // the initial newline if it hasn't been printed yet by the previous + // block about standalone comments. + if !newlinePrinted { + buf.WriteByte(newline) + newlinePrinted = true + } + + // check if we have adjacent one liner items. If yes we'll going to align + // the comments. 
+ var aligned []*ast.ObjectItem + for _, item := range o.List.Items[index:] { + // we don't group one line lists + if len(o.List.Items) == 1 { + break + } + + // one means a oneliner with out any lead comment + // two means a oneliner with lead comment + // anything else might be something else + cur := lines(string(p.objectItem(item))) + if cur > 2 { + break + } + + curPos := item.Pos() + + nextPos := token.Pos{} + if index != len(o.List.Items)-1 { + nextPos = o.List.Items[index+1].Pos() + } + + prevPos := token.Pos{} + if index != 0 { + prevPos = o.List.Items[index-1].Pos() + } + + // fmt.Println("DEBUG ----------------") + // fmt.Printf("prev = %+v prevPos: %s\n", prev, prevPos) + // fmt.Printf("cur = %+v curPos: %s\n", cur, curPos) + // fmt.Printf("next = %+v nextPos: %s\n", next, nextPos) + + if curPos.Line+1 == nextPos.Line { + aligned = append(aligned, item) + index++ + continue + } + + if curPos.Line-1 == prevPos.Line { + aligned = append(aligned, item) + index++ + + // finish if we have a new line or comment next. This happens + // if the next item is not adjacent + if curPos.Line+1 != nextPos.Line { + break + } + continue + } + + break + } + + // put newlines if the items are between other non aligned items. + // newlines are also added if there is a standalone comment already, so + // check it too + if !commented && index != len(aligned) { + buf.WriteByte(newline) + } + + if len(aligned) >= 1 { + p.prev = aligned[len(aligned)-1].Pos() + + items := p.alignedItems(aligned) + buf.Write(p.indent(items)) + } else { + p.prev = o.List.Items[index].Pos() + + buf.Write(p.indent(p.objectItem(o.List.Items[index]))) + index++ + } + + buf.WriteByte(newline) + } + + buf.WriteString("}") + return buf.Bytes() +} + +func (p *printer) alignedItems(items []*ast.ObjectItem) []byte { + var buf bytes.Buffer + + // find the longest key and value length, needed for alignment + var longestKeyLen int // longest key length + var longestValLen int // longest value length + for _, item := range items { + key := len(item.Keys[0].Token.Text) + val := len(p.output(item.Val)) + + if key > longestKeyLen { + longestKeyLen = key + } + + if val > longestValLen { + longestValLen = val + } + } + + for i, item := range items { + if item.LeadComment != nil { + for _, comment := range item.LeadComment.List { + buf.WriteString(comment.Text) + buf.WriteByte(newline) + } + } + + for i, k := range item.Keys { + keyLen := len(k.Token.Text) + buf.WriteString(k.Token.Text) + for i := 0; i < longestKeyLen-keyLen+1; i++ { + buf.WriteByte(blank) + } + + // reach end of key + if i == len(item.Keys)-1 && len(item.Keys) == 1 { + buf.WriteString("=") + buf.WriteByte(blank) + } + } + + val := p.output(item.Val) + valLen := len(val) + buf.Write(val) + + if item.Val.Pos().Line == item.Keys[0].Pos().Line && item.LineComment != nil { + for i := 0; i < longestValLen-valLen+1; i++ { + buf.WriteByte(blank) + } + + for _, comment := range item.LineComment.List { + buf.WriteString(comment.Text) + } + } + + // do not print for the last item + if i != len(items)-1 { + buf.WriteByte(newline) + } + } + + return buf.Bytes() +} + +// list returns the printable HCL form of an list type. 
+func (p *printer) list(l *ast.ListType) []byte { + var buf bytes.Buffer + buf.WriteString("[") + + var longestLine int + for _, item := range l.List { + // for now we assume that the list only contains literal types + if lit, ok := item.(*ast.LiteralType); ok { + lineLen := len(lit.Token.Text) + if lineLen > longestLine { + longestLine = lineLen + } + } + } + + insertSpaceBeforeItem := false + lastHadLeadComment := false + for i, item := range l.List { + // Keep track of whether this item is a heredoc since that has + // unique behavior. + heredoc := false + if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { + heredoc = true + } + + if item.Pos().Line != l.Lbrack.Line { + // multiline list, add newline before we add each item + buf.WriteByte(newline) + insertSpaceBeforeItem = false + + // If we have a lead comment, then we want to write that first + leadComment := false + if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil { + leadComment = true + + // If this isn't the first item and the previous element + // didn't have a lead comment, then we need to add an extra + // newline to properly space things out. If it did have a + // lead comment previously then this would be done + // automatically. + if i > 0 && !lastHadLeadComment { + buf.WriteByte(newline) + } + + for _, comment := range lit.LeadComment.List { + buf.Write(p.indent([]byte(comment.Text))) + buf.WriteByte(newline) + } + } + + // also indent each line + val := p.output(item) + curLen := len(val) + buf.Write(p.indent(val)) + + // if this item is a heredoc, then we output the comma on + // the next line. This is the only case this happens. + comma := []byte{','} + if heredoc { + buf.WriteByte(newline) + comma = p.indent(comma) + } + + buf.Write(comma) + + if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { + // if the next item doesn't have any comments, do not align + buf.WriteByte(blank) // align one space + for i := 0; i < longestLine-curLen; i++ { + buf.WriteByte(blank) + } + + for _, comment := range lit.LineComment.List { + buf.WriteString(comment.Text) + } + } + + lastItem := i == len(l.List)-1 + if lastItem { + buf.WriteByte(newline) + } + + if leadComment && !lastItem { + buf.WriteByte(newline) + } + + lastHadLeadComment = leadComment + } else { + if insertSpaceBeforeItem { + buf.WriteByte(blank) + insertSpaceBeforeItem = false + } + + // Output the item itself + // also indent each line + val := p.output(item) + curLen := len(val) + buf.Write(val) + + // If this is a heredoc item we always have to output a newline + // so that it parses properly. + if heredoc { + buf.WriteByte(newline) + } + + // If this isn't the last element, write a comma. 
+ if i != len(l.List)-1 { + buf.WriteString(",") + insertSpaceBeforeItem = true + } + + if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { + // if the next item doesn't have any comments, do not align + buf.WriteByte(blank) // align one space + for i := 0; i < longestLine-curLen; i++ { + buf.WriteByte(blank) + } + + for _, comment := range lit.LineComment.List { + buf.WriteString(comment.Text) + } + } + } + + } + + buf.WriteString("]") + return buf.Bytes() +} + +// indent indents the lines of the given buffer for each non-empty line +func (p *printer) indent(buf []byte) []byte { + var prefix []byte + if p.cfg.SpacesWidth != 0 { + for i := 0; i < p.cfg.SpacesWidth; i++ { + prefix = append(prefix, blank) + } + } else { + prefix = []byte{tab} + } + + var res []byte + bol := true + for _, c := range buf { + if bol && c != '\n' { + res = append(res, prefix...) + } + + res = append(res, c) + bol = c == '\n' + } + return res +} + +// unindent removes all the indentation from the tombstoned lines +func (p *printer) unindent(buf []byte) []byte { + var res []byte + for i := 0; i < len(buf); i++ { + skip := len(buf)-i <= len(unindent) + if !skip { + skip = !bytes.Equal(unindent, buf[i:i+len(unindent)]) + } + if skip { + res = append(res, buf[i]) + continue + } + + // We have a marker. we have to backtrace here and clean out + // any whitespace ahead of our tombstone up to a \n + for j := len(res) - 1; j >= 0; j-- { + if res[j] == '\n' { + break + } + + res = res[:j] + } + + // Skip the entire unindent marker + i += len(unindent) - 1 + } + + return res +} + +// heredocIndent marks all the 2nd and further lines as unindentable +func (p *printer) heredocIndent(buf []byte) []byte { + var res []byte + bol := false + for _, c := range buf { + if bol && c != '\n' { + res = append(res, unindent...) + } + res = append(res, c) + bol = c == '\n' + } + return res +} + +// isSingleLineObject tells whether the given object item is a single +// line object such as "obj {}". +// +// A single line object: +// +// * has no lead comments (hence multi-line) +// * has no assignment +// * has no values in the stanza (within {}) +// +func (p *printer) isSingleLineObject(val *ast.ObjectItem) bool { + // If there is a lead comment, can't be one line + if val.LeadComment != nil { + return false + } + + // If there is assignment, we always break by line + if val.Assign.IsValid() { + return false + } + + // If it isn't an object type, then its not a single line object + ot, ok := val.Val.(*ast.ObjectType) + if !ok { + return false + } + + // If the object has no items, it is single line! + return len(ot.List.Items) == 0 +} + +func lines(txt string) int { + endline := 1 + for i := 0; i < len(txt); i++ { + if txt[i] == '\n' { + endline++ + } + } + return endline +} + +// ---------------------------------------------------------------------------- +// Tracing support + +func (p *printer) printTrace(a ...interface{}) { + if !p.enableTrace { + return + } + + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + const n = len(dots) + i := 2 * p.indentTrace + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) 
+} + +func trace(p *printer, msg string) *printer { + p.printTrace(msg, "(") + p.indentTrace++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *printer) { + p.indentTrace-- + p.printTrace(")") +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go new file mode 100644 index 000000000..6617ab8e7 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go @@ -0,0 +1,66 @@ +// Package printer implements printing of AST nodes to HCL format. +package printer + +import ( + "bytes" + "io" + "text/tabwriter" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/parser" +) + +var DefaultConfig = Config{ + SpacesWidth: 2, +} + +// A Config node controls the output of Fprint. +type Config struct { + SpacesWidth int // if set, it will use spaces instead of tabs for alignment +} + +func (c *Config) Fprint(output io.Writer, node ast.Node) error { + p := &printer{ + cfg: *c, + comments: make([]*ast.CommentGroup, 0), + standaloneComments: make([]*ast.CommentGroup, 0), + // enableTrace: true, + } + + p.collectComments(node) + + if _, err := output.Write(p.unindent(p.output(node))); err != nil { + return err + } + + // flush tabwriter, if any + var err error + if tw, _ := output.(*tabwriter.Writer); tw != nil { + err = tw.Flush() + } + + return err +} + +// Fprint "pretty-prints" an HCL node to output +// It calls Config.Fprint with default settings. +func Fprint(output io.Writer, node ast.Node) error { + return DefaultConfig.Fprint(output, node) +} + +// Format formats src HCL and returns the result. +func Format(src []byte) ([]byte, error) { + node, err := parser.Parse(src) + if err != nil { + return nil, err + } + + var buf bytes.Buffer + if err := DefaultConfig.Fprint(&buf, node); err != nil { + return nil, err + } + + // Add trailing newline to result + buf.WriteString("\n") + return buf.Bytes(), nil +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go b/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go new file mode 100644 index 000000000..5248259b9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/printer_test.go @@ -0,0 +1,149 @@ +package printer + +import ( + "bytes" + "errors" + "flag" + "fmt" + "io/ioutil" + "path/filepath" + "testing" + + "github.com/hashicorp/hcl/hcl/parser" +) + +var update = flag.Bool("update", false, "update golden files") + +const ( + dataDir = "testdata" +) + +type entry struct { + source, golden string +} + +// Use go test -update to create/update the respective golden files. 
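
Format, defined just above, is what the golden-file tests below exercise; a minimal sketch of using it to normalize a snippet (the variable block is illustrative):

    package main

    import (
        "fmt"
        "log"

        "github.com/hashicorp/hcl/hcl/printer"
    )

    func main() {
        // Unaligned input; Format re-parses it and prints it back with the
        // package's default two-space alignment.
        src := []byte("variable \"foo\" {\ndefault = \"bar\"\n  description = \"bar\"\n}\n")

        out, err := printer.Format(src)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Print(string(out))
    }
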
+var data = []entry{ + {"complexhcl.input", "complexhcl.golden"}, + {"list.input", "list.golden"}, + {"list_comment.input", "list_comment.golden"}, + {"comment.input", "comment.golden"}, + {"comment_crlf.input", "comment.golden"}, + {"comment_aligned.input", "comment_aligned.golden"}, + {"comment_array.input", "comment_array.golden"}, + {"comment_end_file.input", "comment_end_file.golden"}, + {"comment_multiline_indent.input", "comment_multiline_indent.golden"}, + {"comment_multiline_no_stanza.input", "comment_multiline_no_stanza.golden"}, + {"comment_multiline_stanza.input", "comment_multiline_stanza.golden"}, + {"comment_newline.input", "comment_newline.golden"}, + {"comment_object_multi.input", "comment_object_multi.golden"}, + {"comment_standalone.input", "comment_standalone.golden"}, + {"empty_block.input", "empty_block.golden"}, + {"list_of_objects.input", "list_of_objects.golden"}, + {"multiline_string.input", "multiline_string.golden"}, + {"object_singleline.input", "object_singleline.golden"}, + {"object_with_heredoc.input", "object_with_heredoc.golden"}, +} + +func TestFiles(t *testing.T) { + for _, e := range data { + source := filepath.Join(dataDir, e.source) + golden := filepath.Join(dataDir, e.golden) + t.Run(e.source, func(t *testing.T) { + check(t, source, golden) + }) + } +} + +func check(t *testing.T, source, golden string) { + src, err := ioutil.ReadFile(source) + if err != nil { + t.Error(err) + return + } + + res, err := format(src) + if err != nil { + t.Error(err) + return + } + + // update golden files if necessary + if *update { + if err := ioutil.WriteFile(golden, res, 0644); err != nil { + t.Error(err) + } + return + } + + // get golden + gld, err := ioutil.ReadFile(golden) + if err != nil { + t.Error(err) + return + } + + // formatted source and golden must be the same + if err := diff(source, golden, res, gld); err != nil { + t.Error(err) + return + } +} + +// diff compares a and b. +func diff(aname, bname string, a, b []byte) error { + var buf bytes.Buffer // holding long error message + + // compare lengths + if len(a) != len(b) { + fmt.Fprintf(&buf, "\nlength changed: len(%s) = %d, len(%s) = %d", aname, len(a), bname, len(b)) + } + + // compare contents + line := 1 + offs := 1 + for i := 0; i < len(a) && i < len(b); i++ { + ch := a[i] + if ch != b[i] { + fmt.Fprintf(&buf, "\n%s:%d:%d: %q", aname, line, i-offs+1, lineAt(a, offs)) + fmt.Fprintf(&buf, "\n%s:%d:%d: %q", bname, line, i-offs+1, lineAt(b, offs)) + fmt.Fprintf(&buf, "\n\n") + break + } + if ch == '\n' { + line++ + offs = i + 1 + } + } + + if buf.Len() > 0 { + return errors.New(buf.String()) + } + return nil +} + +// format parses src, prints the corresponding AST, verifies the resulting +// src is syntactically correct, and returns the resulting src or an error +// if any. +func format(src []byte) ([]byte, error) { + formatted, err := Format(src) + if err != nil { + return nil, err + } + + // make sure formatted output is syntactically correct + if _, err := parser.Parse(formatted); err != nil { + return nil, fmt.Errorf("parse: %s\n%s", err, formatted) + } + + return formatted, nil +} + +// lineAt returns the line in text starting at offset offs. 
+func lineAt(text []byte, offs int) []byte { + i := offs + for i < len(text) && text[i] != '\n' { + i++ + } + return text[offs:i] +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden new file mode 100644 index 000000000..9d4b072a0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.golden @@ -0,0 +1,36 @@ +// A standalone comment is a comment which is not attached to any kind of node + +// This comes from Terraform, as a test +variable "foo" { + # Standalone comment should be still here + + default = "bar" + description = "bar" # yooo +} + +/* This is a multi line standalone +comment*/ + +// fatih arslan +/* This is a developer test +account and a multine comment */ +developer = ["fatih", "arslan"] // fatih arslan + +# One line here +numbers = [1, 2] // another line here + +# Another comment +variable = { + description = "bar" # another yooo + + foo { + # Nested standalone + + bar = "fatih" + } +} + +// lead comment +foo { + bar = "fatih" // line comment 2 +} // line comment 3 diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input new file mode 100644 index 000000000..57c37ac1d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment.input @@ -0,0 +1,37 @@ +// A standalone comment is a comment which is not attached to any kind of node + + // This comes from Terraform, as a test +variable "foo" { + # Standalone comment should be still here + + default = "bar" + description = "bar" # yooo +} + +/* This is a multi line standalone +comment*/ + + +// fatih arslan +/* This is a developer test +account and a multine comment */ +developer = [ "fatih", "arslan"] // fatih arslan + +# One line here +numbers = [1,2] // another line here + + # Another comment +variable = { + description = "bar" # another yooo + foo { + # Nested standalone + + bar = "fatih" + } +} + + // lead comment +foo { + bar = "fatih" // line comment 2 +} // line comment 3 + diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_aligned.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_aligned.golden new file mode 100644 index 000000000..6ff21504c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_aligned.golden @@ -0,0 +1,32 @@ +aligned { + # We have some aligned items below + foo = "fatih" # yoo1 + default = "bar" # yoo2 + bar = "bar and foo" # yoo3 + + default = { + bar = "example" + } + + #deneme arslan + fatih = ["fatih"] # yoo4 + + #fatih arslan + fatiharslan = ["arslan"] // yoo5 + + default = { + bar = "example" + } + + security_groups = [ + "foo", # kenya 1 + "${aws_security_group.firewall.foo}", # kenya 2 + ] + + security_groups2 = [ + "foo", # kenya 1 + "bar", # kenya 1.5 + "${aws_security_group.firewall.foo}", # kenya 2 + "foobar", # kenya 3 + ] +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_aligned.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_aligned.input new file mode 100644 index 000000000..bd43ab1ad --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_aligned.input @@ -0,0 +1,28 @@ +aligned { +# We have some aligned items below + foo = "fatih" # yoo1 + default = "bar" # yoo2 + bar = "bar and foo" # yoo3 + default = { + bar = "example" + } + #deneme arslan + fatih = ["fatih"] # yoo4 + #fatih arslan + fatiharslan = ["arslan"] 
// yoo5 + default = { + bar = "example" + } + +security_groups = [ + "foo", # kenya 1 + "${aws_security_group.firewall.foo}", # kenya 2 +] + +security_groups2 = [ + "foo", # kenya 1 + "bar", # kenya 1.5 + "${aws_security_group.firewall.foo}", # kenya 2 + "foobar", # kenya 3 +] +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_array.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_array.golden new file mode 100644 index 000000000..e778eafa3 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_array.golden @@ -0,0 +1,13 @@ +banana = [ + # I really want to comment this item in the array. + "a", + + # This as well + "b", + + "c", # And C + "d", + + # And another + "e", +] diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_array.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_array.input new file mode 100644 index 000000000..e778eafa3 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_array.input @@ -0,0 +1,13 @@ +banana = [ + # I really want to comment this item in the array. + "a", + + # This as well + "b", + + "c", # And C + "d", + + # And another + "e", +] diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input new file mode 100644 index 000000000..5d2720672 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_crlf.input @@ -0,0 +1,37 @@ +// A standalone comment is a comment which is not attached to any kind of node + + // This comes from Terraform, as a test +variable "foo" { + # Standalone comment should be still here + + default = "bar" + description = "bar" # yooo +} + +/* This is a multi line standalone +comment*/ + + +// fatih arslan +/* This is a developer test +account and a multine comment */ +developer = [ "fatih", "arslan"] // fatih arslan + +# One line here +numbers = [1,2] // another line here + + # Another comment +variable = { + description = "bar" # another yooo + foo { + # Nested standalone + + bar = "fatih" + } +} + + // lead comment +foo { + bar = "fatih" // line comment 2 +} // line comment 3 + diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_end_file.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_end_file.golden new file mode 100644 index 000000000..dbeae36a8 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_end_file.golden @@ -0,0 +1,6 @@ +resource "blah" "blah" {} + +// +// +// + diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_end_file.input b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_end_file.input new file mode 100644 index 000000000..68c4c282e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_end_file.input @@ -0,0 +1,5 @@ +resource "blah" "blah" {} + +// +// +// diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_multiline_indent.golden b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_multiline_indent.golden new file mode 100644 index 000000000..74c4ccd89 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/testdata/comment_multiline_indent.golden @@ -0,0 +1,12 @@ +resource "provider" "resource" { + /* + SPACE_SENSITIVE_CODE = < 0 { + s.err("unexpected null character (0x00)") + return eof + } + + // debug + // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, 
s.srcPos.Column) + return ch +} + +// unread unreads the previous read Rune and updates the source position +func (s *Scanner) unread() { + if err := s.buf.UnreadRune(); err != nil { + panic(err) // this is user fault, we should catch it + } + s.srcPos = s.prevPos // put back last position +} + +// peek returns the next rune without advancing the reader. +func (s *Scanner) peek() rune { + peek, _, err := s.buf.ReadRune() + if err != nil { + return eof + } + + s.buf.UnreadRune() + return peek +} + +// Scan scans the next token and returns the token. +func (s *Scanner) Scan() token.Token { + ch := s.next() + + // skip white space + for isWhitespace(ch) { + ch = s.next() + } + + var tok token.Type + + // token text markings + s.tokStart = s.srcPos.Offset - s.lastCharLen + + // token position, initial next() is moving the offset by one(size of rune + // actually), though we are interested with the starting point + s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen + if s.srcPos.Column > 0 { + // common case: last character was not a '\n' + s.tokPos.Line = s.srcPos.Line + s.tokPos.Column = s.srcPos.Column + } else { + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + s.tokPos.Line = s.srcPos.Line - 1 + s.tokPos.Column = s.lastLineLen + } + + switch { + case isLetter(ch): + tok = token.IDENT + lit := s.scanIdentifier() + if lit == "true" || lit == "false" { + tok = token.BOOL + } + case isDecimal(ch): + tok = s.scanNumber(ch) + default: + switch ch { + case eof: + tok = token.EOF + case '"': + tok = token.STRING + s.scanString() + case '#', '/': + tok = token.COMMENT + s.scanComment(ch) + case '.': + tok = token.PERIOD + ch = s.peek() + if isDecimal(ch) { + tok = token.FLOAT + ch = s.scanMantissa(ch) + ch = s.scanExponent(ch) + } + case '<': + tok = token.HEREDOC + s.scanHeredoc() + case '[': + tok = token.LBRACK + case ']': + tok = token.RBRACK + case '{': + tok = token.LBRACE + case '}': + tok = token.RBRACE + case ',': + tok = token.COMMA + case '=': + tok = token.ASSIGN + case '+': + tok = token.ADD + case '-': + if isDecimal(s.peek()) { + ch := s.next() + tok = s.scanNumber(ch) + } else { + tok = token.SUB + } + default: + s.err("illegal char") + } + } + + // finish token ending + s.tokEnd = s.srcPos.Offset + + // create token literal + var tokenText string + if s.tokStart >= 0 { + tokenText = string(s.src[s.tokStart:s.tokEnd]) + } + s.tokStart = s.tokEnd // ensure idempotency of tokenText() call + + return token.Token{ + Type: tok, + Pos: s.tokPos, + Text: tokenText, + } +} + +func (s *Scanner) scanComment(ch rune) { + // single line comments + if ch == '#' || (ch == '/' && s.peek() != '*') { + if ch == '/' && s.peek() != '/' { + s.err("expected '/' for comment") + return + } + + ch = s.next() + for ch != '\n' && ch >= 0 && ch != eof { + ch = s.next() + } + if ch != eof && ch >= 0 { + s.unread() + } + return + } + + // be sure we get the character after /* This allows us to find comment's + // that are not erminated + if ch == '/' { + s.next() + ch = s.next() // read character after "/*" + } + + // look for /* - style comments + for { + if ch < 0 || ch == eof { + s.err("comment not terminated") + break + } + + ch0 := ch + ch = s.next() + if ch0 == '*' && ch == '/' { + break + } + } +} + +// scanNumber scans a HCL number definition starting with the given rune +func (s *Scanner) scanNumber(ch rune) token.Type { + if ch == '0' { + // check for hexadecimal, octal or float + ch = s.next() + if ch == 'x' || ch == 'X' 
{ + // hexadecimal + ch = s.next() + found := false + for isHexadecimal(ch) { + ch = s.next() + found = true + } + + if !found { + s.err("illegal hexadecimal number") + } + + if ch != eof { + s.unread() + } + + return token.NUMBER + } + + // now it's either something like: 0421(octal) or 0.1231(float) + illegalOctal := false + for isDecimal(ch) { + ch = s.next() + if ch == '8' || ch == '9' { + // this is just a possibility. For example 0159 is illegal, but + // 0159.23 is valid. So we mark a possible illegal octal. If + // the next character is not a period, we'll print the error. + illegalOctal = true + } + } + + if ch == 'e' || ch == 'E' { + ch = s.scanExponent(ch) + return token.FLOAT + } + + if ch == '.' { + ch = s.scanFraction(ch) + + if ch == 'e' || ch == 'E' { + ch = s.next() + ch = s.scanExponent(ch) + } + return token.FLOAT + } + + if illegalOctal { + s.err("illegal octal number") + } + + if ch != eof { + s.unread() + } + return token.NUMBER + } + + s.scanMantissa(ch) + ch = s.next() // seek forward + if ch == 'e' || ch == 'E' { + ch = s.scanExponent(ch) + return token.FLOAT + } + + if ch == '.' { + ch = s.scanFraction(ch) + if ch == 'e' || ch == 'E' { + ch = s.next() + ch = s.scanExponent(ch) + } + return token.FLOAT + } + + if ch != eof { + s.unread() + } + return token.NUMBER +} + +// scanMantissa scans the mantissa beginning from the rune. It returns the next +// non decimal rune. It's used to determine wheter it's a fraction or exponent. +func (s *Scanner) scanMantissa(ch rune) rune { + scanned := false + for isDecimal(ch) { + ch = s.next() + scanned = true + } + + if scanned && ch != eof { + s.unread() + } + return ch +} + +// scanFraction scans the fraction after the '.' rune +func (s *Scanner) scanFraction(ch rune) rune { + if ch == '.' { + ch = s.peek() // we peek just to see if we can move forward + ch = s.scanMantissa(ch) + } + return ch +} + +// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' +// rune. 
+func (s *Scanner) scanExponent(ch rune) rune { + if ch == 'e' || ch == 'E' { + ch = s.next() + if ch == '-' || ch == '+' { + ch = s.next() + } + ch = s.scanMantissa(ch) + } + return ch +} + +// scanHeredoc scans a heredoc string +func (s *Scanner) scanHeredoc() { + // Scan the second '<' in example: '<= len(identBytes) && identRegexp.Match(s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) { + break + } + + // Not an anchor match, record the start of a new line + lineStart = s.srcPos.Offset + } + + if ch == eof { + s.err("heredoc not terminated") + return + } + } + + return +} + +// scanString scans a quoted string +func (s *Scanner) scanString() { + braces := 0 + for { + // '"' opening already consumed + // read character after quote + ch := s.next() + + if (ch == '\n' && braces == 0) || ch < 0 || ch == eof { + s.err("literal not terminated") + return + } + + if ch == '"' && braces == 0 { + break + } + + // If we're going into a ${} then we can ignore quotes for awhile + if braces == 0 && ch == '$' && s.peek() == '{' { + braces++ + s.next() + } else if braces > 0 && ch == '{' { + braces++ + } + if braces > 0 && ch == '}' { + braces-- + } + + if ch == '\\' { + s.scanEscape() + } + } + + return +} + +// scanEscape scans an escape sequence +func (s *Scanner) scanEscape() rune { + // http://en.cppreference.com/w/cpp/language/escape + ch := s.next() // read character after '/' + switch ch { + case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': + // nothing to do + case '0', '1', '2', '3', '4', '5', '6', '7': + // octal notation + ch = s.scanDigits(ch, 8, 3) + case 'x': + // hexademical notation + ch = s.scanDigits(s.next(), 16, 2) + case 'u': + // universal character name + ch = s.scanDigits(s.next(), 16, 4) + case 'U': + // universal character name + ch = s.scanDigits(s.next(), 16, 8) + default: + s.err("illegal char escape") + } + return ch +} + +// scanDigits scans a rune with the given base for n times. For example an +// octal notation \184 would yield in scanDigits(ch, 8, 3) +func (s *Scanner) scanDigits(ch rune, base, n int) rune { + start := n + for n > 0 && digitVal(ch) < base { + ch = s.next() + if ch == eof { + // If we see an EOF, we halt any more scanning of digits + // immediately. + break + } + + n-- + } + if n > 0 { + s.err("illegal char escape") + } + + if n != start { + // we scanned all digits, put the last non digit char back, + // only if we read anything at all + s.unread() + } + + return ch +} + +// scanIdentifier scans an identifier and returns the literal string +func (s *Scanner) scanIdentifier() string { + offs := s.srcPos.Offset - s.lastCharLen + ch := s.next() + for isLetter(ch) || isDigit(ch) || ch == '-' || ch == '.' { + ch = s.next() + } + + if ch != eof { + s.unread() // we got identifier, put back latest char + } + + return string(s.src[offs:s.srcPos.Offset]) +} + +// recentPosition returns the position of the character immediately after the +// character or token returned by the last call to Scan. 
+func (s *Scanner) recentPosition() (pos token.Pos) { + pos.Offset = s.srcPos.Offset - s.lastCharLen + switch { + case s.srcPos.Column > 0: + // common case: last character was not a '\n' + pos.Line = s.srcPos.Line + pos.Column = s.srcPos.Column + case s.lastLineLen > 0: + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + pos.Line = s.srcPos.Line - 1 + pos.Column = s.lastLineLen + default: + // at the beginning of the source + pos.Line = 1 + pos.Column = 1 + } + return +} + +// err prints the error of any scanning to s.Error function. If the function is +// not defined, by default it prints them to os.Stderr +func (s *Scanner) err(msg string) { + s.ErrorCount++ + pos := s.recentPosition() + + if s.Error != nil { + s.Error(pos, msg) + return + } + + fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) +} + +// isHexadecimal returns true if the given rune is a letter +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) +} + +// isDigit returns true if the given rune is a decimal digit +func isDigit(ch rune) bool { + return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) +} + +// isDecimal returns true if the given rune is a decimal number +func isDecimal(ch rune) bool { + return '0' <= ch && ch <= '9' +} + +// isHexadecimal returns true if the given rune is an hexadecimal number +func isHexadecimal(ch rune) bool { + return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' +} + +// isWhitespace returns true if the rune is a space, tab, newline or carriage return +func isWhitespace(ch rune) bool { + return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' +} + +// digitVal returns the integer value of a given octal,decimal or hexadecimal rune +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch - '0') + case 'a' <= ch && ch <= 'f': + return int(ch - 'a' + 10) + case 'A' <= ch && ch <= 'F': + return int(ch - 'A' + 10) + } + return 16 // larger than any legal digit val +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go new file mode 100644 index 000000000..4f2c9cbe0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner_test.go @@ -0,0 +1,591 @@ +package scanner + +import ( + "bytes" + "fmt" + "testing" + + "strings" + + "github.com/hashicorp/hcl/hcl/token" +) + +var f100 = strings.Repeat("f", 100) + +type tokenPair struct { + tok token.Type + text string +} + +var tokenLists = map[string][]tokenPair{ + "comment": []tokenPair{ + {token.COMMENT, "//"}, + {token.COMMENT, "////"}, + {token.COMMENT, "// comment"}, + {token.COMMENT, "// /* comment */"}, + {token.COMMENT, "// // comment //"}, + {token.COMMENT, "//" + f100}, + {token.COMMENT, "#"}, + {token.COMMENT, "##"}, + {token.COMMENT, "# comment"}, + {token.COMMENT, "# /* comment */"}, + {token.COMMENT, "# # comment #"}, + {token.COMMENT, "#" + f100}, + {token.COMMENT, "/**/"}, + {token.COMMENT, "/***/"}, + {token.COMMENT, "/* comment */"}, + {token.COMMENT, "/* // comment */"}, + {token.COMMENT, "/* /* comment */"}, + {token.COMMENT, "/*\n comment\n*/"}, + {token.COMMENT, "/*" + f100 + "*/"}, + }, + "operator": []tokenPair{ + {token.LBRACK, "["}, + {token.LBRACE, "{"}, + {token.COMMA, ","}, + {token.PERIOD, "."}, + {token.RBRACK, "]"}, + {token.RBRACE, "}"}, + {token.ASSIGN, "="}, + {token.ADD, "+"}, + {token.SUB, 
"-"}, + }, + "bool": []tokenPair{ + {token.BOOL, "true"}, + {token.BOOL, "false"}, + }, + "ident": []tokenPair{ + {token.IDENT, "a"}, + {token.IDENT, "a0"}, + {token.IDENT, "foobar"}, + {token.IDENT, "foo-bar"}, + {token.IDENT, "abc123"}, + {token.IDENT, "LGTM"}, + {token.IDENT, "_"}, + {token.IDENT, "_abc123"}, + {token.IDENT, "abc123_"}, + {token.IDENT, "_abc_123_"}, + {token.IDENT, "_äöü"}, + {token.IDENT, "_本"}, + {token.IDENT, "äöü"}, + {token.IDENT, "本"}, + {token.IDENT, "a۰۱۸"}, + {token.IDENT, "foo६४"}, + {token.IDENT, "bar9876"}, + }, + "heredoc": []tokenPair{ + {token.HEREDOC, "< 0 for %q", s.ErrorCount, src) + } +} + +func testTokenList(t *testing.T, tokenList []tokenPair) { + // create artifical source code + buf := new(bytes.Buffer) + for _, ident := range tokenList { + fmt.Fprintf(buf, "%s\n", ident.text) + } + + s := New(buf.Bytes()) + for _, ident := range tokenList { + tok := s.Scan() + if tok.Type != ident.tok { + t.Errorf("tok = %q want %q for %q\n", tok, ident.tok, ident.text) + } + + if tok.Text != ident.text { + t.Errorf("text = %q want %q", tok.String(), ident.text) + } + + } +} + +func countNewlines(s string) int { + n := 0 + for _, ch := range s { + if ch == '\n' { + n++ + } + } + return n +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go new file mode 100644 index 000000000..5f981eaa2 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go @@ -0,0 +1,241 @@ +package strconv + +import ( + "errors" + "unicode/utf8" +) + +// ErrSyntax indicates that a value does not have the right syntax for the target type. +var ErrSyntax = errors.New("invalid syntax") + +// Unquote interprets s as a single-quoted, double-quoted, +// or backquoted Go string literal, returning the string value +// that s quotes. (If s is single-quoted, it would be a Go +// character literal; Unquote returns the corresponding +// one-character string.) +func Unquote(s string) (t string, err error) { + n := len(s) + if n < 2 { + return "", ErrSyntax + } + quote := s[0] + if quote != s[n-1] { + return "", ErrSyntax + } + s = s[1 : n-1] + + if quote != '"' { + return "", ErrSyntax + } + if !contains(s, '$') && !contains(s, '{') && contains(s, '\n') { + return "", ErrSyntax + } + + // Is it trivial? Avoid allocation. + if !contains(s, '\\') && !contains(s, quote) && !contains(s, '$') { + switch quote { + case '"': + return s, nil + case '\'': + r, size := utf8.DecodeRuneInString(s) + if size == len(s) && (r != utf8.RuneError || size != 1) { + return s, nil + } + } + } + + var runeTmp [utf8.UTFMax]byte + buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations. + for len(s) > 0 { + // If we're starting a '${}' then let it through un-unquoted. + // Specifically: we don't unquote any characters within the `${}` + // section. + if s[0] == '$' && len(s) > 1 && s[1] == '{' { + buf = append(buf, '$', '{') + s = s[2:] + + // Continue reading until we find the closing brace, copying as-is + braces := 1 + for len(s) > 0 && braces > 0 { + r, size := utf8.DecodeRuneInString(s) + if r == utf8.RuneError { + return "", ErrSyntax + } + + s = s[size:] + + n := utf8.EncodeRune(runeTmp[:], r) + buf = append(buf, runeTmp[:n]...) + + switch r { + case '{': + braces++ + case '}': + braces-- + } + } + if braces != 0 { + return "", ErrSyntax + } + if len(s) == 0 { + // If there's no string left, we're done! 
+ break + } else { + // If there's more left, we need to pop back up to the top of the loop + // in case there's another interpolation in this string. + continue + } + } + + if s[0] == '\n' { + return "", ErrSyntax + } + + c, multibyte, ss, err := unquoteChar(s, quote) + if err != nil { + return "", err + } + s = ss + if c < utf8.RuneSelf || !multibyte { + buf = append(buf, byte(c)) + } else { + n := utf8.EncodeRune(runeTmp[:], c) + buf = append(buf, runeTmp[:n]...) + } + if quote == '\'' && len(s) != 0 { + // single-quoted must be single character + return "", ErrSyntax + } + } + return string(buf), nil +} + +// contains reports whether the string contains the byte c. +func contains(s string, c byte) bool { + for i := 0; i < len(s); i++ { + if s[i] == c { + return true + } + } + return false +} + +func unhex(b byte) (v rune, ok bool) { + c := rune(b) + switch { + case '0' <= c && c <= '9': + return c - '0', true + case 'a' <= c && c <= 'f': + return c - 'a' + 10, true + case 'A' <= c && c <= 'F': + return c - 'A' + 10, true + } + return +} + +func unquoteChar(s string, quote byte) (value rune, multibyte bool, tail string, err error) { + // easy cases + switch c := s[0]; { + case c == quote && (quote == '\'' || quote == '"'): + err = ErrSyntax + return + case c >= utf8.RuneSelf: + r, size := utf8.DecodeRuneInString(s) + return r, true, s[size:], nil + case c != '\\': + return rune(s[0]), false, s[1:], nil + } + + // hard case: c is backslash + if len(s) <= 1 { + err = ErrSyntax + return + } + c := s[1] + s = s[2:] + + switch c { + case 'a': + value = '\a' + case 'b': + value = '\b' + case 'f': + value = '\f' + case 'n': + value = '\n' + case 'r': + value = '\r' + case 't': + value = '\t' + case 'v': + value = '\v' + case 'x', 'u', 'U': + n := 0 + switch c { + case 'x': + n = 2 + case 'u': + n = 4 + case 'U': + n = 8 + } + var v rune + if len(s) < n { + err = ErrSyntax + return + } + for j := 0; j < n; j++ { + x, ok := unhex(s[j]) + if !ok { + err = ErrSyntax + return + } + v = v<<4 | x + } + s = s[n:] + if c == 'x' { + // single-byte string, possibly not UTF-8 + value = v + break + } + if v > utf8.MaxRune { + err = ErrSyntax + return + } + value = v + multibyte = true + case '0', '1', '2', '3', '4', '5', '6', '7': + v := rune(c) - '0' + if len(s) < 2 { + err = ErrSyntax + return + } + for j := 0; j < 2; j++ { // one digit already; two more + x := rune(s[j]) - '0' + if x < 0 || x > 7 { + err = ErrSyntax + return + } + v = (v << 3) | x + } + s = s[2:] + if v > 255 { + err = ErrSyntax + return + } + value = v + case '\\': + value = '\\' + case '\'', '"': + if c != quote { + err = ErrSyntax + return + } + value = rune(c) + default: + err = ErrSyntax + return + } + tail = s + return +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote_test.go b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote_test.go new file mode 100644 index 000000000..65be375d9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote_test.go @@ -0,0 +1,96 @@ +package strconv + +import "testing" + +type quoteTest struct { + in string + out string + ascii string +} + +var quotetests = []quoteTest{ + {"\a\b\f\r\n\t\v", `"\a\b\f\r\n\t\v"`, `"\a\b\f\r\n\t\v"`}, + {"\\", `"\\"`, `"\\"`}, + {"abc\xffdef", `"abc\xffdef"`, `"abc\xffdef"`}, + {"\u263a", `"☺"`, `"\u263a"`}, + {"\U0010ffff", `"\U0010ffff"`, `"\U0010ffff"`}, + {"\x04", `"\x04"`, `"\x04"`}, +} + +type unQuoteTest struct { + in string + out string +} + +var unquotetests = []unQuoteTest{ + {`""`, ""}, + {`"a"`, "a"}, + {`"abc"`, "abc"}, + {`"☺"`, 
"☺"}, + {`"hello world"`, "hello world"}, + {`"\xFF"`, "\xFF"}, + {`"\377"`, "\377"}, + {`"\u1234"`, "\u1234"}, + {`"\U00010111"`, "\U00010111"}, + {`"\U0001011111"`, "\U0001011111"}, + {`"\a\b\f\n\r\t\v\\\""`, "\a\b\f\n\r\t\v\\\""}, + {`"'"`, "'"}, + {`"${file("foo")}"`, `${file("foo")}`}, + {`"${file("\"foo\"")}"`, `${file("\"foo\"")}`}, + {`"echo ${var.region}${element(split(",",var.zones),0)}"`, + `echo ${var.region}${element(split(",",var.zones),0)}`}, + {`"${HH\\:mm\\:ss}"`, `${HH\\:mm\\:ss}`}, + {`"${\n}"`, `${\n}`}, +} + +var misquoted = []string{ + ``, + `"`, + `"a`, + `"'`, + `b"`, + `"\"`, + `"\9"`, + `"\19"`, + `"\129"`, + `'\'`, + `'\9'`, + `'\19'`, + `'\129'`, + `'ab'`, + `"\x1!"`, + `"\U12345678"`, + `"\z"`, + "`", + "`xxx", + "`\"", + `"\'"`, + `'\"'`, + "\"\n\"", + "\"\\n\n\"", + "'\n'", + `"${"`, + `"${foo{}"`, + "\"${foo}\n\"", +} + +func TestUnquote(t *testing.T) { + for _, tt := range unquotetests { + if out, err := Unquote(tt.in); err != nil || out != tt.out { + t.Errorf("Unquote(%#q) = %q, %v want %q, nil", tt.in, out, err, tt.out) + } + } + + // run the quote tests too, backward + for _, tt := range quotetests { + if in, err := Unquote(tt.out); in != tt.in { + t.Errorf("Unquote(%#q) = %q, %v, want %q, nil", tt.out, in, err, tt.in) + } + } + + for _, s := range misquoted { + if out, err := Unquote(s); out != "" || err != ErrSyntax { + t.Errorf("Unquote(%#q) = %q, %v want %q, %v", s, out, err, "", ErrSyntax) + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/array_comment.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/array_comment.hcl new file mode 100644 index 000000000..78c267582 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/array_comment.hcl @@ -0,0 +1,4 @@ +foo = [ + "1", + "2", # comment +] diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/assign_colon.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/assign_colon.hcl new file mode 100644 index 000000000..eb5a99a69 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/assign_colon.hcl @@ -0,0 +1,6 @@ +resource = [{ + "foo": { + "bar": {}, + "baz": [1, 2, "foo"], + } +}] diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/comment.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/comment.hcl new file mode 100644 index 000000000..1ff7f29fd --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/comment.hcl @@ -0,0 +1,15 @@ +// Foo + +/* Bar */ + +/* +/* +Baz +*/ + +# Another + +# Multiple +# Lines + +foo = "bar" diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/comment_single.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/comment_single.hcl new file mode 100644 index 000000000..fec56017d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/comment_single.hcl @@ -0,0 +1 @@ +# Hello diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/complex.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/complex.hcl new file mode 100644 index 000000000..cccb5b06f --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/complex.hcl @@ -0,0 +1,42 @@ +// This comes from Terraform, as a test +variable "foo" { + default = "bar" + description = "bar" +} + +provider "aws" { + access_key = "foo" + secret_key = "bar" +} + +provider "do" { + api_key = "${var.foo}" +} + +resource "aws_security_group" "firewall" { + count = 5 +} + +resource aws_instance "web" { + ami = "${var.foo}" + security_groups = [ + "foo", + "${aws_security_group.firewall.foo}" + ] 
+ + network_interface { + device_index = 0 + description = "Main network interface" + } +} + +resource "aws_instance" "db" { + security_groups = "${aws_security_group.firewall.*.id}" + VPC = "foo" + + depends_on = ["aws_instance.web"] +} + +output "web_ip" { + value = "${aws_instance.web.private_ip}" +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/complex_key.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/complex_key.hcl new file mode 100644 index 000000000..0007aaf5f --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/complex_key.hcl @@ -0,0 +1 @@ +foo.bar = "baz" diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/empty.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/empty.hcl new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/list.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/list.hcl new file mode 100644 index 000000000..059d4ce65 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/list.hcl @@ -0,0 +1 @@ +foo = [1, 2, "foo"] diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/list_comma.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/list_comma.hcl new file mode 100644 index 000000000..50f4218ac --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/list_comma.hcl @@ -0,0 +1 @@ +foo = [1, 2, "foo",] diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/multiple.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/multiple.hcl new file mode 100644 index 000000000..029c54b0c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/multiple.hcl @@ -0,0 +1,2 @@ +foo = "bar" +key = 7 diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/old.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/old.hcl new file mode 100644 index 000000000..e9f77cae9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/old.hcl @@ -0,0 +1,3 @@ +default = { + "eu-west-1": "ami-b1cf19c6", +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure.hcl new file mode 100644 index 000000000..92592fbb3 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure.hcl @@ -0,0 +1,5 @@ +// This is a test structure for the lexer +foo bar "baz" { + key = 7 + foo = "bar" +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure_basic.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure_basic.hcl new file mode 100644 index 000000000..7229a1f01 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure_basic.hcl @@ -0,0 +1,5 @@ +foo { + value = 7 + "value" = 8 + "complex::value" = 9 +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure_empty.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure_empty.hcl new file mode 100644 index 000000000..4d156ddea --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/structure_empty.hcl @@ -0,0 +1 @@ +resource "foo" "bar" {} diff --git a/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/types.hcl b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/types.hcl new file mode 100644 index 000000000..cf2747ea1 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/test-fixtures/types.hcl @@ -0,0 +1,7 @@ +foo = "bar" +bar = 7 +baz = [1,2,3] +foo = -12 +bar = 3.14159 +foo = true +bar = false diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/position.go 
b/vendor/github.com/hashicorp/hcl/hcl/token/position.go new file mode 100644 index 000000000..59c1bb72d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/token/position.go @@ -0,0 +1,46 @@ +package token + +import "fmt" + +// Pos describes an arbitrary source position +// including the file, line, and column location. +// A Position is valid if the line number is > 0. +type Pos struct { + Filename string // filename, if any + Offset int // offset, starting at 0 + Line int // line number, starting at 1 + Column int // column number, starting at 1 (character count) +} + +// IsValid returns true if the position is valid. +func (p *Pos) IsValid() bool { return p.Line > 0 } + +// String returns a string in one of several forms: +// +// file:line:column valid position with file name +// line:column valid position without file name +// file invalid position with file name +// - invalid position without file name +func (p Pos) String() string { + s := p.Filename + if p.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", p.Line, p.Column) + } + if s == "" { + s = "-" + } + return s +} + +// Before reports whether the position p is before u. +func (p Pos) Before(u Pos) bool { + return u.Offset > p.Offset || u.Line > p.Line +} + +// After reports whether the position p is after u. +func (p Pos) After(u Pos) bool { + return u.Offset < p.Offset || u.Line < p.Line +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/token.go b/vendor/github.com/hashicorp/hcl/hcl/token/token.go new file mode 100644 index 000000000..e37c0664e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/token/token.go @@ -0,0 +1,219 @@ +// Package token defines constants representing the lexical tokens for HCL +// (HashiCorp Configuration Language) +package token + +import ( + "fmt" + "strconv" + "strings" + + hclstrconv "github.com/hashicorp/hcl/hcl/strconv" +) + +// Token defines a single HCL token which can be obtained via the Scanner +type Token struct { + Type Type + Pos Pos + Text string + JSON bool +} + +// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) +type Type int + +const ( + // Special tokens + ILLEGAL Type = iota + EOF + COMMENT + + identifier_beg + IDENT // literals + literal_beg + NUMBER // 12345 + FLOAT // 123.45 + BOOL // true,false + STRING // "abc" + HEREDOC // < 0 { + // Pop the current item + n := len(frontier) + item := frontier[n-1] + frontier = frontier[:n-1] + + switch v := item.Val.(type) { + case *ast.ObjectType: + items, frontier = flattenObjectType(v, item, items, frontier) + case *ast.ListType: + items, frontier = flattenListType(v, item, items, frontier) + default: + items = append(items, item) + } + } + + // Reverse the list since the frontier model runs things backwards + for i := len(items)/2 - 1; i >= 0; i-- { + opp := len(items) - 1 - i + items[i], items[opp] = items[opp], items[i] + } + + // Done! Set the original items + list.Items = items + return n, true + }) +} + +func flattenListType( + ot *ast.ListType, + item *ast.ObjectItem, + items []*ast.ObjectItem, + frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { + // If the list is empty, keep the original list + if len(ot.List) == 0 { + items = append(items, item) + return items, frontier + } + + // All the elements of this object must also be objects! + for _, subitem := range ot.List { + if _, ok := subitem.(*ast.ObjectType); !ok { + items = append(items, item) + return items, frontier + } + } + + // Great! 
We have a match go through all the items and flatten + for _, elem := range ot.List { + // Add it to the frontier so that we can recurse + frontier = append(frontier, &ast.ObjectItem{ + Keys: item.Keys, + Assign: item.Assign, + Val: elem, + LeadComment: item.LeadComment, + LineComment: item.LineComment, + }) + } + + return items, frontier +} + +func flattenObjectType( + ot *ast.ObjectType, + item *ast.ObjectItem, + items []*ast.ObjectItem, + frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { + // If the list has no items we do not have to flatten anything + if ot.List.Items == nil { + items = append(items, item) + return items, frontier + } + + // All the elements of this object must also be objects! + for _, subitem := range ot.List.Items { + if _, ok := subitem.Val.(*ast.ObjectType); !ok { + items = append(items, item) + return items, frontier + } + } + + // Great! We have a match go through all the items and flatten + for _, subitem := range ot.List.Items { + // Copy the new key + keys := make([]*ast.ObjectKey, len(item.Keys)+len(subitem.Keys)) + copy(keys, item.Keys) + copy(keys[len(item.Keys):], subitem.Keys) + + // Add it to the frontier so that we can recurse + frontier = append(frontier, &ast.ObjectItem{ + Keys: keys, + Assign: item.Assign, + Val: subitem.Val, + LeadComment: item.LeadComment, + LineComment: item.LineComment, + }) + } + + return items, frontier +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser.go b/vendor/github.com/hashicorp/hcl/json/parser/parser.go new file mode 100644 index 000000000..125a5f072 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/parser.go @@ -0,0 +1,313 @@ +package parser + +import ( + "errors" + "fmt" + + "github.com/hashicorp/hcl/hcl/ast" + hcltoken "github.com/hashicorp/hcl/hcl/token" + "github.com/hashicorp/hcl/json/scanner" + "github.com/hashicorp/hcl/json/token" +) + +type Parser struct { + sc *scanner.Scanner + + // Last read token + tok token.Token + commaPrev token.Token + + enableTrace bool + indent int + n int // buffer size (max = 1) +} + +func newParser(src []byte) *Parser { + return &Parser{ + sc: scanner.New(src), + } +} + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func Parse(src []byte) (*ast.File, error) { + p := newParser(src) + return p.Parse() +} + +var errEofToken = errors.New("EOF token found") + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func (p *Parser) Parse() (*ast.File, error) { + f := &ast.File{} + var err, scerr error + p.sc.Error = func(pos token.Pos, msg string) { + scerr = fmt.Errorf("%s: %s", pos, msg) + } + + // The root must be an object in JSON + object, err := p.object() + if scerr != nil { + return nil, scerr + } + if err != nil { + return nil, err + } + + // We make our final node an object list so it is more HCL compatible + f.Node = object.List + + // Flatten it, which finds patterns and turns them into more HCL-like + // AST trees. + flattenObjects(f.Node) + + return f, nil +} + +func (p *Parser) objectList() (*ast.ObjectList, error) { + defer un(trace(p, "ParseObjectList")) + node := &ast.ObjectList{} + + for { + n, err := p.objectItem() + if err == errEofToken { + break // we are finished + } + + // we don't return a nil node, because might want to use already + // collected items. + if err != nil { + return node, err + } + + node.Add(n) + + // Check for a followup comma. 
If it isn't a comma, then we're done + if tok := p.scan(); tok.Type != token.COMMA { + break + } + } + + return node, nil +} + +// objectItem parses a single object item +func (p *Parser) objectItem() (*ast.ObjectItem, error) { + defer un(trace(p, "ParseObjectItem")) + + keys, err := p.objectKey() + if err != nil { + return nil, err + } + + o := &ast.ObjectItem{ + Keys: keys, + } + + switch p.tok.Type { + case token.COLON: + pos := p.tok.Pos + o.Assign = hcltoken.Pos{ + Filename: pos.Filename, + Offset: pos.Offset, + Line: pos.Line, + Column: pos.Column, + } + + o.Val, err = p.objectValue() + if err != nil { + return nil, err + } + } + + return o, nil +} + +// objectKey parses an object key and returns a ObjectKey AST +func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { + keyCount := 0 + keys := make([]*ast.ObjectKey, 0) + + for { + tok := p.scan() + switch tok.Type { + case token.EOF: + return nil, errEofToken + case token.STRING: + keyCount++ + keys = append(keys, &ast.ObjectKey{ + Token: p.tok.HCLToken(), + }) + case token.COLON: + // If we have a zero keycount it means that we never got + // an object key, i.e. `{ :`. This is a syntax error. + if keyCount == 0 { + return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) + } + + // Done + return keys, nil + case token.ILLEGAL: + return nil, errors.New("illegal") + default: + return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) + } + } +} + +// object parses any type of object, such as number, bool, string, object or +// list. +func (p *Parser) objectValue() (ast.Node, error) { + defer un(trace(p, "ParseObjectValue")) + tok := p.scan() + + switch tok.Type { + case token.NUMBER, token.FLOAT, token.BOOL, token.NULL, token.STRING: + return p.literalType() + case token.LBRACE: + return p.objectType() + case token.LBRACK: + return p.listType() + case token.EOF: + return nil, errEofToken + } + + return nil, fmt.Errorf("Expected object value, got unknown token: %+v", tok) +} + +// object parses any type of object, such as number, bool, string, object or +// list. +func (p *Parser) object() (*ast.ObjectType, error) { + defer un(trace(p, "ParseType")) + tok := p.scan() + + switch tok.Type { + case token.LBRACE: + return p.objectType() + case token.EOF: + return nil, errEofToken + } + + return nil, fmt.Errorf("Expected object, got unknown token: %+v", tok) +} + +// objectType parses an object type and returns a ObjectType AST +func (p *Parser) objectType() (*ast.ObjectType, error) { + defer un(trace(p, "ParseObjectType")) + + // we assume that the currently scanned token is a LBRACE + o := &ast.ObjectType{} + + l, err := p.objectList() + + // if we hit RBRACE, we are good to go (means we parsed all Items), if it's + // not a RBRACE, it's an syntax error and we just return it. + if err != nil && p.tok.Type != token.RBRACE { + return nil, err + } + + o.List = l + return o, nil +} + +// listType parses a list type and returns a ListType AST +func (p *Parser) listType() (*ast.ListType, error) { + defer un(trace(p, "ParseListType")) + + // we assume that the currently scanned token is a LBRACK + l := &ast.ListType{} + + for { + tok := p.scan() + switch tok.Type { + case token.NUMBER, token.FLOAT, token.STRING: + node, err := p.literalType() + if err != nil { + return nil, err + } + + l.Add(node) + case token.COMMA: + continue + case token.LBRACE: + node, err := p.objectType() + if err != nil { + return nil, err + } + + l.Add(node) + case token.BOOL: + // TODO(arslan) should we support? 
not supported by HCL yet + case token.LBRACK: + // TODO(arslan) should we support nested lists? Even though it's + // written in README of HCL, it's not a part of the grammar + // (not defined in parse.y) + case token.RBRACK: + // finished + return l, nil + default: + return nil, fmt.Errorf("unexpected token while parsing list: %s", tok.Type) + } + + } +} + +// literalType parses a literal type and returns a LiteralType AST +func (p *Parser) literalType() (*ast.LiteralType, error) { + defer un(trace(p, "ParseLiteral")) + + return &ast.LiteralType{ + Token: p.tok.HCLToken(), + }, nil +} + +// scan returns the next token from the underlying scanner. If a token has +// been unscanned then read that instead. +func (p *Parser) scan() token.Token { + // If we have a token on the buffer, then return it. + if p.n != 0 { + p.n = 0 + return p.tok + } + + p.tok = p.sc.Scan() + return p.tok +} + +// unscan pushes the previously read token back onto the buffer. +func (p *Parser) unscan() { + p.n = 1 +} + +// ---------------------------------------------------------------------------- +// Parsing support + +func (p *Parser) printTrace(a ...interface{}) { + if !p.enableTrace { + return + } + + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + const n = len(dots) + fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) + + i := 2 * p.indent + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) +} + +func trace(p *Parser, msg string) *Parser { + p.printTrace(msg, "(") + p.indent++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *Parser) { + p.indent-- + p.printTrace(")") +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser_test.go b/vendor/github.com/hashicorp/hcl/json/parser/parser_test.go new file mode 100644 index 000000000..e0cebf50a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/parser_test.go @@ -0,0 +1,384 @@ +package parser + +import ( + "fmt" + "io/ioutil" + "path/filepath" + "reflect" + "runtime" + "testing" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/token" +) + +func TestType(t *testing.T) { + var literals = []struct { + typ token.Type + src string + }{ + {token.STRING, `"foo": "bar"`}, + {token.NUMBER, `"foo": 123`}, + {token.FLOAT, `"foo": 123.12`}, + {token.FLOAT, `"foo": -123.12`}, + {token.BOOL, `"foo": true`}, + {token.STRING, `"foo": null`}, + } + + for _, l := range literals { + t.Logf("Testing: %s", l.src) + + p := newParser([]byte(l.src)) + item, err := p.objectItem() + if err != nil { + t.Error(err) + } + + lit, ok := item.Val.(*ast.LiteralType) + if !ok { + t.Errorf("node should be of type LiteralType, got: %T", item.Val) + } + + if lit.Token.Type != l.typ { + t.Errorf("want: %s, got: %s", l.typ, lit.Token.Type) + } + } +} + +func TestListType(t *testing.T) { + var literals = []struct { + src string + tokens []token.Type + }{ + { + `"foo": ["123", 123]`, + []token.Type{token.STRING, token.NUMBER}, + }, + { + `"foo": [123, "123",]`, + []token.Type{token.NUMBER, token.STRING}, + }, + { + `"foo": []`, + []token.Type{}, + }, + { + `"foo": ["123", 123]`, + []token.Type{token.STRING, token.NUMBER}, + }, + { + `"foo": ["123", {}]`, + []token.Type{token.STRING, token.LBRACE}, + }, + } + + for _, l := range literals { + t.Logf("Testing: %s", l.src) + + p := newParser([]byte(l.src)) + item, err := p.objectItem() + if err != nil { + t.Error(err) + } + + list, ok := item.Val.(*ast.ListType) + if !ok { + t.Errorf("node should be of 
type LiteralType, got: %T", item.Val) + } + + tokens := []token.Type{} + for _, li := range list.List { + switch v := li.(type) { + case *ast.LiteralType: + tokens = append(tokens, v.Token.Type) + case *ast.ObjectType: + tokens = append(tokens, token.LBRACE) + } + } + + equals(t, l.tokens, tokens) + } +} + +func TestObjectType(t *testing.T) { + var literals = []struct { + src string + nodeType []ast.Node + itemLen int + }{ + { + `"foo": {}`, + nil, + 0, + }, + { + `"foo": { + "bar": "fatih" + }`, + []ast.Node{&ast.LiteralType{}}, + 1, + }, + { + `"foo": { + "bar": "fatih", + "baz": ["arslan"] + }`, + []ast.Node{ + &ast.LiteralType{}, + &ast.ListType{}, + }, + 2, + }, + { + `"foo": { + "bar": {} + }`, + []ast.Node{ + &ast.ObjectType{}, + }, + 1, + }, + { + `"foo": { + "bar": {}, + "foo": true + }`, + []ast.Node{ + &ast.ObjectType{}, + &ast.LiteralType{}, + }, + 2, + }, + } + + for _, l := range literals { + t.Logf("Testing:\n%s\n", l.src) + + p := newParser([]byte(l.src)) + // p.enableTrace = true + item, err := p.objectItem() + if err != nil { + t.Error(err) + } + + // we know that the ObjectKey name is foo for all cases, what matters + // is the object + obj, ok := item.Val.(*ast.ObjectType) + if !ok { + t.Errorf("node should be of type LiteralType, got: %T", item.Val) + } + + // check if the total length of items are correct + equals(t, l.itemLen, len(obj.List.Items)) + + // check if the types are correct + for i, item := range obj.List.Items { + equals(t, reflect.TypeOf(l.nodeType[i]), reflect.TypeOf(item.Val)) + } + } +} + +func TestFlattenObjects(t *testing.T) { + var literals = []struct { + src string + nodeType []ast.Node + itemLen int + }{ + { + `{ + "foo": [ + { + "foo": "svh", + "bar": "fatih" + } + ] + }`, + []ast.Node{ + &ast.ObjectType{}, + &ast.LiteralType{}, + &ast.LiteralType{}, + }, + 3, + }, + { + `{ + "variable": { + "foo": {} + } + }`, + []ast.Node{ + &ast.ObjectType{}, + }, + 1, + }, + { + `{ + "empty": [] + }`, + []ast.Node{ + &ast.ListType{}, + }, + 1, + }, + { + `{ + "basic": [1, 2, 3] + }`, + []ast.Node{ + &ast.ListType{}, + }, + 1, + }, + } + + for _, l := range literals { + t.Logf("Testing:\n%s\n", l.src) + + f, err := Parse([]byte(l.src)) + if err != nil { + t.Error(err) + } + + // the first object is always an ObjectList so just assert that one + // so we can use it as such + obj, ok := f.Node.(*ast.ObjectList) + if !ok { + t.Errorf("node should be *ast.ObjectList, got: %T", f.Node) + } + + // check if the types are correct + var i int + for _, item := range obj.Items { + equals(t, reflect.TypeOf(l.nodeType[i]), reflect.TypeOf(item.Val)) + i++ + + if obj, ok := item.Val.(*ast.ObjectType); ok { + for _, item := range obj.List.Items { + equals(t, reflect.TypeOf(l.nodeType[i]), reflect.TypeOf(item.Val)) + i++ + } + } + } + + // check if the number of items is correct + equals(t, l.itemLen, i) + + } +} + +func TestObjectKey(t *testing.T) { + keys := []struct { + exp []token.Type + src string + }{ + {[]token.Type{token.STRING}, `"foo": {}`}, + } + + for _, k := range keys { + p := newParser([]byte(k.src)) + keys, err := p.objectKey() + if err != nil { + t.Fatal(err) + } + + tokens := []token.Type{} + for _, o := range keys { + tokens = append(tokens, o.Token.Type) + } + + equals(t, k.exp, tokens) + } + + errKeys := []struct { + src string + }{ + {`foo 12 {}`}, + {`foo bar = {}`}, + {`foo []`}, + {`12 {}`}, + } + + for _, k := range errKeys { + p := newParser([]byte(k.src)) + _, err := p.objectKey() + if err == nil { + t.Errorf("case '%s' should give an error", 
k.src) + } + } +} + +// Official HCL tests +func TestParse(t *testing.T) { + cases := []struct { + Name string + Err bool + }{ + { + "array.json", + false, + }, + { + "basic.json", + false, + }, + { + "object.json", + false, + }, + { + "types.json", + false, + }, + { + "bad_input_128.json", + true, + }, + { + "bad_input_tf_8110.json", + true, + }, + { + "good_input_tf_8110.json", + false, + }, + } + + const fixtureDir = "./test-fixtures" + + for _, tc := range cases { + d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.Name)) + if err != nil { + t.Fatalf("err: %s", err) + } + + _, err = Parse(d) + if (err != nil) != tc.Err { + t.Fatalf("Input: %s\n\nError: %s", tc.Name, err) + } + } +} + +func TestParse_inline(t *testing.T) { + cases := []struct { + Value string + Err bool + }{ + {"{:{", true}, + } + + for _, tc := range cases { + _, err := Parse([]byte(tc.Value)) + if (err != nil) != tc.Err { + t.Fatalf("Input: %q\n\nError: %s", tc.Value, err) + } + } +} + +// equals fails the test if exp is not equal to act. +func equals(tb testing.TB, exp, act interface{}) { + if !reflect.DeepEqual(exp, act) { + _, file, line, _ := runtime.Caller(1) + fmt.Printf("\033[31m%s:%d:\n\n\texp: %s\n\n\tgot: %s\033[39m\n\n", filepath.Base(file), line, exp, act) + tb.FailNow() + } +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json new file mode 100644 index 000000000..e320f17ab --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json @@ -0,0 +1,4 @@ +{ + "foo": [1, 2, "bar"], + "bar": "baz" +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json new file mode 100644 index 000000000..b5f850c96 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json @@ -0,0 +1 @@ +{:{ diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json new file mode 100644 index 000000000..a04385833 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json @@ -0,0 +1,7 @@ +{ + "variable": { + "poc": { + "default": "${replace("europe-west", "-", " ")}" + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json new file mode 100644 index 000000000..b54bde96c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json @@ -0,0 +1,3 @@ +{ + "foo": "bar" +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json new file mode 100644 index 000000000..f21aa090d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json @@ -0,0 +1,7 @@ +{ + "variable": { + "poc": { + "default": "${replace(\"europe-west\", \"-\", \" \")}" + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json new file mode 100644 index 000000000..72168a3cc --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json @@ -0,0 +1,5 @@ +{ + "foo": { + "bar": [1,2] + } +} diff --git 
a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json new file mode 100644 index 000000000..9a142a6ca --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json @@ -0,0 +1,10 @@ +{ + "foo": "bar", + "bar": 7, + "baz": [1,2,3], + "foo": -12, + "bar": 3.14159, + "foo": true, + "bar": false, + "foo": null +} diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go new file mode 100644 index 000000000..fe3f0f095 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go @@ -0,0 +1,451 @@ +package scanner + +import ( + "bytes" + "fmt" + "os" + "unicode" + "unicode/utf8" + + "github.com/hashicorp/hcl/json/token" +) + +// eof represents a marker rune for the end of the reader. +const eof = rune(0) + +// Scanner defines a lexical scanner +type Scanner struct { + buf *bytes.Buffer // Source buffer for advancing and scanning + src []byte // Source buffer for immutable access + + // Source Position + srcPos token.Pos // current position + prevPos token.Pos // previous position, used for peek() method + + lastCharLen int // length of last character in bytes + lastLineLen int // length of last line in characters (for correct column reporting) + + tokStart int // token text start position + tokEnd int // token text end position + + // Error is called for each error encountered. If no Error + // function is set, the error is reported to os.Stderr. + Error func(pos token.Pos, msg string) + + // ErrorCount is incremented by one for each error encountered. + ErrorCount int + + // tokPos is the start position of most recently scanned token; set by + // Scan. The Filename field is always left untouched by the Scanner. If + // an error is reported (via Error) and Position is invalid, the scanner is + // not inside a token. + tokPos token.Pos +} + +// New creates and initializes a new instance of Scanner using src as +// its source content. +func New(src []byte) *Scanner { + // even though we accept a src, we read from a io.Reader compatible type + // (*bytes.Buffer). So in the future we might easily change it to streaming + // read. + b := bytes.NewBuffer(src) + s := &Scanner{ + buf: b, + src: src, + } + + // srcPosition always starts with 1 + s.srcPos.Line = 1 + return s +} + +// next reads the next rune from the bufferred reader. Returns the rune(0) if +// an error occurs (or io.EOF is returned). +func (s *Scanner) next() rune { + ch, size, err := s.buf.ReadRune() + if err != nil { + // advance for error reporting + s.srcPos.Column++ + s.srcPos.Offset += size + s.lastCharLen = size + return eof + } + + if ch == utf8.RuneError && size == 1 { + s.srcPos.Column++ + s.srcPos.Offset += size + s.lastCharLen = size + s.err("illegal UTF-8 encoding") + return ch + } + + // remember last position + s.prevPos = s.srcPos + + s.srcPos.Column++ + s.lastCharLen = size + s.srcPos.Offset += size + + if ch == '\n' { + s.srcPos.Line++ + s.lastLineLen = s.srcPos.Column + s.srcPos.Column = 0 + } + + // debug + // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) + return ch +} + +// unread unreads the previous read Rune and updates the source position +func (s *Scanner) unread() { + if err := s.buf.UnreadRune(); err != nil { + panic(err) // this is user fault, we should catch it + } + s.srcPos = s.prevPos // put back last position +} + +// peek returns the next rune without advancing the reader. 
+func (s *Scanner) peek() rune { + peek, _, err := s.buf.ReadRune() + if err != nil { + return eof + } + + s.buf.UnreadRune() + return peek +} + +// Scan scans the next token and returns the token. +func (s *Scanner) Scan() token.Token { + ch := s.next() + + // skip white space + for isWhitespace(ch) { + ch = s.next() + } + + var tok token.Type + + // token text markings + s.tokStart = s.srcPos.Offset - s.lastCharLen + + // token position, initial next() is moving the offset by one(size of rune + // actually), though we are interested with the starting point + s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen + if s.srcPos.Column > 0 { + // common case: last character was not a '\n' + s.tokPos.Line = s.srcPos.Line + s.tokPos.Column = s.srcPos.Column + } else { + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + s.tokPos.Line = s.srcPos.Line - 1 + s.tokPos.Column = s.lastLineLen + } + + switch { + case isLetter(ch): + lit := s.scanIdentifier() + if lit == "true" || lit == "false" { + tok = token.BOOL + } else if lit == "null" { + tok = token.NULL + } else { + s.err("illegal char") + } + case isDecimal(ch): + tok = s.scanNumber(ch) + default: + switch ch { + case eof: + tok = token.EOF + case '"': + tok = token.STRING + s.scanString() + case '.': + tok = token.PERIOD + ch = s.peek() + if isDecimal(ch) { + tok = token.FLOAT + ch = s.scanMantissa(ch) + ch = s.scanExponent(ch) + } + case '[': + tok = token.LBRACK + case ']': + tok = token.RBRACK + case '{': + tok = token.LBRACE + case '}': + tok = token.RBRACE + case ',': + tok = token.COMMA + case ':': + tok = token.COLON + case '-': + if isDecimal(s.peek()) { + ch := s.next() + tok = s.scanNumber(ch) + } else { + s.err("illegal char") + } + default: + s.err("illegal char: " + string(ch)) + } + } + + // finish token ending + s.tokEnd = s.srcPos.Offset + + // create token literal + var tokenText string + if s.tokStart >= 0 { + tokenText = string(s.src[s.tokStart:s.tokEnd]) + } + s.tokStart = s.tokEnd // ensure idempotency of tokenText() call + + return token.Token{ + Type: tok, + Pos: s.tokPos, + Text: tokenText, + } +} + +// scanNumber scans a HCL number definition starting with the given rune +func (s *Scanner) scanNumber(ch rune) token.Type { + zero := ch == '0' + pos := s.srcPos + + s.scanMantissa(ch) + ch = s.next() // seek forward + if ch == 'e' || ch == 'E' { + ch = s.scanExponent(ch) + return token.FLOAT + } + + if ch == '.' { + ch = s.scanFraction(ch) + if ch == 'e' || ch == 'E' { + ch = s.next() + ch = s.scanExponent(ch) + } + return token.FLOAT + } + + if ch != eof { + s.unread() + } + + // If we have a larger number and this is zero, error + if zero && pos != s.srcPos { + s.err("numbers cannot start with 0") + } + + return token.NUMBER +} + +// scanMantissa scans the mantissa beginning from the rune. It returns the next +// non decimal rune. It's used to determine wheter it's a fraction or exponent. +func (s *Scanner) scanMantissa(ch rune) rune { + scanned := false + for isDecimal(ch) { + ch = s.next() + scanned = true + } + + if scanned && ch != eof { + s.unread() + } + return ch +} + +// scanFraction scans the fraction after the '.' rune +func (s *Scanner) scanFraction(ch rune) rune { + if ch == '.' { + ch = s.peek() // we peek just to see if we can move forward + ch = s.scanMantissa(ch) + } + return ch +} + +// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' +// rune. 
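+// It is entered with the 'e' or 'E' rune itself; for input such as "42e+10" it
+// consumes the optional sign and the exponent digits.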
+func (s *Scanner) scanExponent(ch rune) rune { + if ch == 'e' || ch == 'E' { + ch = s.next() + if ch == '-' || ch == '+' { + ch = s.next() + } + ch = s.scanMantissa(ch) + } + return ch +} + +// scanString scans a quoted string +func (s *Scanner) scanString() { + braces := 0 + for { + // '"' opening already consumed + // read character after quote + ch := s.next() + + if ch == '\n' || ch < 0 || ch == eof { + s.err("literal not terminated") + return + } + + if ch == '"' { + break + } + + // If we're going into a ${} then we can ignore quotes for awhile + if braces == 0 && ch == '$' && s.peek() == '{' { + braces++ + s.next() + } else if braces > 0 && ch == '{' { + braces++ + } + if braces > 0 && ch == '}' { + braces-- + } + + if ch == '\\' { + s.scanEscape() + } + } + + return +} + +// scanEscape scans an escape sequence +func (s *Scanner) scanEscape() rune { + // http://en.cppreference.com/w/cpp/language/escape + ch := s.next() // read character after '/' + switch ch { + case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': + // nothing to do + case '0', '1', '2', '3', '4', '5', '6', '7': + // octal notation + ch = s.scanDigits(ch, 8, 3) + case 'x': + // hexademical notation + ch = s.scanDigits(s.next(), 16, 2) + case 'u': + // universal character name + ch = s.scanDigits(s.next(), 16, 4) + case 'U': + // universal character name + ch = s.scanDigits(s.next(), 16, 8) + default: + s.err("illegal char escape") + } + return ch +} + +// scanDigits scans a rune with the given base for n times. For example an +// octal notation \184 would yield in scanDigits(ch, 8, 3) +func (s *Scanner) scanDigits(ch rune, base, n int) rune { + for n > 0 && digitVal(ch) < base { + ch = s.next() + n-- + } + if n > 0 { + s.err("illegal char escape") + } + + // we scanned all digits, put the last non digit char back + s.unread() + return ch +} + +// scanIdentifier scans an identifier and returns the literal string +func (s *Scanner) scanIdentifier() string { + offs := s.srcPos.Offset - s.lastCharLen + ch := s.next() + for isLetter(ch) || isDigit(ch) || ch == '-' { + ch = s.next() + } + + if ch != eof { + s.unread() // we got identifier, put back latest char + } + + return string(s.src[offs:s.srcPos.Offset]) +} + +// recentPosition returns the position of the character immediately after the +// character or token returned by the last call to Scan. +func (s *Scanner) recentPosition() (pos token.Pos) { + pos.Offset = s.srcPos.Offset - s.lastCharLen + switch { + case s.srcPos.Column > 0: + // common case: last character was not a '\n' + pos.Line = s.srcPos.Line + pos.Column = s.srcPos.Column + case s.lastLineLen > 0: + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + pos.Line = s.srcPos.Line - 1 + pos.Column = s.lastLineLen + default: + // at the beginning of the source + pos.Line = 1 + pos.Column = 1 + } + return +} + +// err prints the error of any scanning to s.Error function. 
If the function is +// not defined, by default it prints them to os.Stderr +func (s *Scanner) err(msg string) { + s.ErrorCount++ + pos := s.recentPosition() + + if s.Error != nil { + s.Error(pos, msg) + return + } + + fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) +} + +// isHexadecimal returns true if the given rune is a letter +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) +} + +// isHexadecimal returns true if the given rune is a decimal digit +func isDigit(ch rune) bool { + return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) +} + +// isHexadecimal returns true if the given rune is a decimal number +func isDecimal(ch rune) bool { + return '0' <= ch && ch <= '9' +} + +// isHexadecimal returns true if the given rune is an hexadecimal number +func isHexadecimal(ch rune) bool { + return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' +} + +// isWhitespace returns true if the rune is a space, tab, newline or carriage return +func isWhitespace(ch rune) bool { + return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' +} + +// digitVal returns the integer value of a given octal,decimal or hexadecimal rune +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch - '0') + case 'a' <= ch && ch <= 'f': + return int(ch - 'a' + 10) + case 'A' <= ch && ch <= 'F': + return int(ch - 'A' + 10) + } + return 16 // larger than any legal digit val +} diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go new file mode 100644 index 000000000..3033a5797 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go @@ -0,0 +1,362 @@ +package scanner + +import ( + "bytes" + "fmt" + "testing" + + "github.com/hashicorp/hcl/json/token" +) + +var f100 = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + +type tokenPair struct { + tok token.Type + text string +} + +var tokenLists = map[string][]tokenPair{ + "operator": []tokenPair{ + {token.LBRACK, "["}, + {token.LBRACE, "{"}, + {token.COMMA, ","}, + {token.PERIOD, "."}, + {token.RBRACK, "]"}, + {token.RBRACE, "}"}, + }, + "bool": []tokenPair{ + {token.BOOL, "true"}, + {token.BOOL, "false"}, + }, + "string": []tokenPair{ + {token.STRING, `" "`}, + {token.STRING, `"a"`}, + {token.STRING, `"本"`}, + {token.STRING, `"${file(\"foo\")}"`}, + {token.STRING, `"\a"`}, + {token.STRING, `"\b"`}, + {token.STRING, `"\f"`}, + {token.STRING, `"\n"`}, + {token.STRING, `"\r"`}, + {token.STRING, `"\t"`}, + {token.STRING, `"\v"`}, + {token.STRING, `"\""`}, + {token.STRING, `"\000"`}, + {token.STRING, `"\777"`}, + {token.STRING, `"\x00"`}, + {token.STRING, `"\xff"`}, + {token.STRING, `"\u0000"`}, + {token.STRING, `"\ufA16"`}, + {token.STRING, `"\U00000000"`}, + {token.STRING, `"\U0000ffAB"`}, + {token.STRING, `"` + f100 + `"`}, + }, + "number": []tokenPair{ + {token.NUMBER, "0"}, + {token.NUMBER, "1"}, + {token.NUMBER, "9"}, + {token.NUMBER, "42"}, + {token.NUMBER, "1234567890"}, + {token.NUMBER, "-0"}, + {token.NUMBER, "-1"}, + {token.NUMBER, "-9"}, + {token.NUMBER, "-42"}, + {token.NUMBER, "-1234567890"}, + }, + "float": []tokenPair{ + {token.FLOAT, "0."}, + {token.FLOAT, "1."}, + {token.FLOAT, "42."}, + {token.FLOAT, "01234567890."}, + {token.FLOAT, ".0"}, + {token.FLOAT, ".1"}, + {token.FLOAT, ".42"}, + {token.FLOAT, ".0123456789"}, + {token.FLOAT, "0.0"}, + 
{token.FLOAT, "1.0"}, + {token.FLOAT, "42.0"}, + {token.FLOAT, "01234567890.0"}, + {token.FLOAT, "0e0"}, + {token.FLOAT, "1e0"}, + {token.FLOAT, "42e0"}, + {token.FLOAT, "01234567890e0"}, + {token.FLOAT, "0E0"}, + {token.FLOAT, "1E0"}, + {token.FLOAT, "42E0"}, + {token.FLOAT, "01234567890E0"}, + {token.FLOAT, "0e+10"}, + {token.FLOAT, "1e-10"}, + {token.FLOAT, "42e+10"}, + {token.FLOAT, "01234567890e-10"}, + {token.FLOAT, "0E+10"}, + {token.FLOAT, "1E-10"}, + {token.FLOAT, "42E+10"}, + {token.FLOAT, "01234567890E-10"}, + {token.FLOAT, "01.8e0"}, + {token.FLOAT, "1.4e0"}, + {token.FLOAT, "42.2e0"}, + {token.FLOAT, "01234567890.12e0"}, + {token.FLOAT, "0.E0"}, + {token.FLOAT, "1.12E0"}, + {token.FLOAT, "42.123E0"}, + {token.FLOAT, "01234567890.213E0"}, + {token.FLOAT, "0.2e+10"}, + {token.FLOAT, "1.2e-10"}, + {token.FLOAT, "42.54e+10"}, + {token.FLOAT, "01234567890.98e-10"}, + {token.FLOAT, "0.1E+10"}, + {token.FLOAT, "1.1E-10"}, + {token.FLOAT, "42.1E+10"}, + {token.FLOAT, "01234567890.1E-10"}, + {token.FLOAT, "-0.0"}, + {token.FLOAT, "-1.0"}, + {token.FLOAT, "-42.0"}, + {token.FLOAT, "-01234567890.0"}, + {token.FLOAT, "-0e0"}, + {token.FLOAT, "-1e0"}, + {token.FLOAT, "-42e0"}, + {token.FLOAT, "-01234567890e0"}, + {token.FLOAT, "-0E0"}, + {token.FLOAT, "-1E0"}, + {token.FLOAT, "-42E0"}, + {token.FLOAT, "-01234567890E0"}, + {token.FLOAT, "-0e+10"}, + {token.FLOAT, "-1e-10"}, + {token.FLOAT, "-42e+10"}, + {token.FLOAT, "-01234567890e-10"}, + {token.FLOAT, "-0E+10"}, + {token.FLOAT, "-1E-10"}, + {token.FLOAT, "-42E+10"}, + {token.FLOAT, "-01234567890E-10"}, + {token.FLOAT, "-01.8e0"}, + {token.FLOAT, "-1.4e0"}, + {token.FLOAT, "-42.2e0"}, + {token.FLOAT, "-01234567890.12e0"}, + {token.FLOAT, "-0.E0"}, + {token.FLOAT, "-1.12E0"}, + {token.FLOAT, "-42.123E0"}, + {token.FLOAT, "-01234567890.213E0"}, + {token.FLOAT, "-0.2e+10"}, + {token.FLOAT, "-1.2e-10"}, + {token.FLOAT, "-42.54e+10"}, + {token.FLOAT, "-01234567890.98e-10"}, + {token.FLOAT, "-0.1E+10"}, + {token.FLOAT, "-1.1E-10"}, + {token.FLOAT, "-42.1E+10"}, + {token.FLOAT, "-01234567890.1E-10"}, + }, +} + +var orderedTokenLists = []string{ + "comment", + "operator", + "bool", + "string", + "number", + "float", +} + +func TestPosition(t *testing.T) { + // create artifical source code + buf := new(bytes.Buffer) + + for _, listName := range orderedTokenLists { + for _, ident := range tokenLists[listName] { + fmt.Fprintf(buf, "\t\t\t\t%s\n", ident.text) + } + } + + s := New(buf.Bytes()) + + pos := token.Pos{"", 4, 1, 5} + s.Scan() + for _, listName := range orderedTokenLists { + + for _, k := range tokenLists[listName] { + curPos := s.tokPos + // fmt.Printf("[%q] s = %+v:%+v\n", k.text, curPos.Offset, curPos.Column) + + if curPos.Offset != pos.Offset { + t.Fatalf("offset = %d, want %d for %q", curPos.Offset, pos.Offset, k.text) + } + if curPos.Line != pos.Line { + t.Fatalf("line = %d, want %d for %q", curPos.Line, pos.Line, k.text) + } + if curPos.Column != pos.Column { + t.Fatalf("column = %d, want %d for %q", curPos.Column, pos.Column, k.text) + } + pos.Offset += 4 + len(k.text) + 1 // 4 tabs + token bytes + newline + pos.Line += countNewlines(k.text) + 1 // each token is on a new line + + s.Error = func(pos token.Pos, msg string) { + t.Errorf("error %q for %q", msg, k.text) + } + + s.Scan() + } + } + // make sure there were no token-internal errors reported by scanner + if s.ErrorCount != 0 { + t.Errorf("%d errors", s.ErrorCount) + } +} + +func TestComment(t *testing.T) { + testTokenList(t, tokenLists["comment"]) +} + +func TestOperator(t 
*testing.T) { + testTokenList(t, tokenLists["operator"]) +} + +func TestBool(t *testing.T) { + testTokenList(t, tokenLists["bool"]) +} + +func TestIdent(t *testing.T) { + testTokenList(t, tokenLists["ident"]) +} + +func TestString(t *testing.T) { + testTokenList(t, tokenLists["string"]) +} + +func TestNumber(t *testing.T) { + testTokenList(t, tokenLists["number"]) +} + +func TestFloat(t *testing.T) { + testTokenList(t, tokenLists["float"]) +} + +func TestRealExample(t *testing.T) { + complexReal := ` +{ + "variable": { + "foo": { + "default": "bar", + "description": "bar", + "depends_on": ["something"] + } + } +}` + + literals := []struct { + tokenType token.Type + literal string + }{ + {token.LBRACE, `{`}, + {token.STRING, `"variable"`}, + {token.COLON, `:`}, + {token.LBRACE, `{`}, + {token.STRING, `"foo"`}, + {token.COLON, `:`}, + {token.LBRACE, `{`}, + {token.STRING, `"default"`}, + {token.COLON, `:`}, + {token.STRING, `"bar"`}, + {token.COMMA, `,`}, + {token.STRING, `"description"`}, + {token.COLON, `:`}, + {token.STRING, `"bar"`}, + {token.COMMA, `,`}, + {token.STRING, `"depends_on"`}, + {token.COLON, `:`}, + {token.LBRACK, `[`}, + {token.STRING, `"something"`}, + {token.RBRACK, `]`}, + {token.RBRACE, `}`}, + {token.RBRACE, `}`}, + {token.RBRACE, `}`}, + {token.EOF, ``}, + } + + s := New([]byte(complexReal)) + for _, l := range literals { + tok := s.Scan() + if l.tokenType != tok.Type { + t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String()) + } + + if l.literal != tok.Text { + t.Errorf("got: %s want %s\n", tok, l.literal) + } + } + +} + +func TestError(t *testing.T) { + testError(t, "\x80", "1:1", "illegal UTF-8 encoding", token.ILLEGAL) + testError(t, "\xff", "1:1", "illegal UTF-8 encoding", token.ILLEGAL) + + testError(t, `"ab`+"\x80", "1:4", "illegal UTF-8 encoding", token.STRING) + testError(t, `"abc`+"\xff", "1:5", "illegal UTF-8 encoding", token.STRING) + + testError(t, `01238`, "1:7", "numbers cannot start with 0", token.NUMBER) + testError(t, `01238123`, "1:10", "numbers cannot start with 0", token.NUMBER) + testError(t, `'aa'`, "1:1", "illegal char: '", token.ILLEGAL) + + testError(t, `"`, "1:2", "literal not terminated", token.STRING) + testError(t, `"abc`, "1:5", "literal not terminated", token.STRING) + testError(t, `"abc`+"\n", "1:5", "literal not terminated", token.STRING) +} + +func testError(t *testing.T, src, pos, msg string, tok token.Type) { + s := New([]byte(src)) + + errorCalled := false + s.Error = func(p token.Pos, m string) { + if !errorCalled { + if pos != p.String() { + t.Errorf("pos = %q, want %q for %q", p, pos, src) + } + + if m != msg { + t.Errorf("msg = %q, want %q for %q", m, msg, src) + } + errorCalled = true + } + } + + tk := s.Scan() + if tk.Type != tok { + t.Errorf("tok = %s, want %s for %q", tk, tok, src) + } + if !errorCalled { + t.Errorf("error handler not called for %q", src) + } + if s.ErrorCount == 0 { + t.Errorf("count = %d, want > 0 for %q", s.ErrorCount, src) + } +} + +func testTokenList(t *testing.T, tokenList []tokenPair) { + // create artifical source code + buf := new(bytes.Buffer) + for _, ident := range tokenList { + fmt.Fprintf(buf, "%s\n", ident.text) + } + + s := New(buf.Bytes()) + for _, ident := range tokenList { + tok := s.Scan() + if tok.Type != ident.tok { + t.Errorf("tok = %q want %q for %q\n", tok, ident.tok, ident.text) + } + + if tok.Text != ident.text { + t.Errorf("text = %q want %q", tok.String(), ident.text) + } + + } +} + +func countNewlines(s string) int { + n := 0 + for _, ch := range s { + if ch == 
'\n' { + n++ + } + } + return n +} diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json new file mode 100644 index 000000000..e320f17ab --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json @@ -0,0 +1,4 @@ +{ + "foo": [1, 2, "bar"], + "bar": "baz" +} diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json new file mode 100644 index 000000000..b54bde96c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json @@ -0,0 +1,3 @@ +{ + "foo": "bar" +} diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json new file mode 100644 index 000000000..72168a3cc --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json @@ -0,0 +1,5 @@ +{ + "foo": { + "bar": [1,2] + } +} diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json new file mode 100644 index 000000000..9a142a6ca --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json @@ -0,0 +1,10 @@ +{ + "foo": "bar", + "bar": 7, + "baz": [1,2,3], + "foo": -12, + "bar": 3.14159, + "foo": true, + "bar": false, + "foo": null +} diff --git a/vendor/github.com/hashicorp/hcl/json/token/position.go b/vendor/github.com/hashicorp/hcl/json/token/position.go new file mode 100644 index 000000000..59c1bb72d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/token/position.go @@ -0,0 +1,46 @@ +package token + +import "fmt" + +// Pos describes an arbitrary source position +// including the file, line, and column location. +// A Position is valid if the line number is > 0. +type Pos struct { + Filename string // filename, if any + Offset int // offset, starting at 0 + Line int // line number, starting at 1 + Column int // column number, starting at 1 (character count) +} + +// IsValid returns true if the position is valid. +func (p *Pos) IsValid() bool { return p.Line > 0 } + +// String returns a string in one of several forms: +// +// file:line:column valid position with file name +// line:column valid position without file name +// file invalid position with file name +// - invalid position without file name +func (p Pos) String() string { + s := p.Filename + if p.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", p.Line, p.Column) + } + if s == "" { + s = "-" + } + return s +} + +// Before reports whether the position p is before u. +func (p Pos) Before(u Pos) bool { + return u.Offset > p.Offset || u.Line > p.Line +} + +// After reports whether the position p is after u. 
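+// For example, a Pos at offset 10 on line 2 is After a Pos at offset 3 on line 1.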
+func (p Pos) After(u Pos) bool { + return u.Offset < p.Offset || u.Line < p.Line +} diff --git a/vendor/github.com/hashicorp/hcl/json/token/token.go b/vendor/github.com/hashicorp/hcl/json/token/token.go new file mode 100644 index 000000000..95a0c3eee --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/token/token.go @@ -0,0 +1,118 @@ +package token + +import ( + "fmt" + "strconv" + + hcltoken "github.com/hashicorp/hcl/hcl/token" +) + +// Token defines a single HCL token which can be obtained via the Scanner +type Token struct { + Type Type + Pos Pos + Text string +} + +// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) +type Type int + +const ( + // Special tokens + ILLEGAL Type = iota + EOF + + identifier_beg + literal_beg + NUMBER // 12345 + FLOAT // 123.45 + BOOL // true,false + STRING // "abc" + NULL // null + literal_end + identifier_end + + operator_beg + LBRACK // [ + LBRACE // { + COMMA // , + PERIOD // . + COLON // : + + RBRACK // ] + RBRACE // } + + operator_end +) + +var tokens = [...]string{ + ILLEGAL: "ILLEGAL", + + EOF: "EOF", + + NUMBER: "NUMBER", + FLOAT: "FLOAT", + BOOL: "BOOL", + STRING: "STRING", + NULL: "NULL", + + LBRACK: "LBRACK", + LBRACE: "LBRACE", + COMMA: "COMMA", + PERIOD: "PERIOD", + COLON: "COLON", + + RBRACK: "RBRACK", + RBRACE: "RBRACE", +} + +// String returns the string corresponding to the token tok. +func (t Type) String() string { + s := "" + if 0 <= t && t < Type(len(tokens)) { + s = tokens[t] + } + if s == "" { + s = "token(" + strconv.Itoa(int(t)) + ")" + } + return s +} + +// IsIdentifier returns true for tokens corresponding to identifiers and basic +// type literals; it returns false otherwise. +func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end } + +// IsLiteral returns true for tokens corresponding to basic type literals; it +// returns false otherwise. +func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end } + +// IsOperator returns true for tokens corresponding to operators and +// delimiters; it returns false otherwise. +func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end } + +// String returns the token's literal text. Note that this is only +// applicable for certain token types, such as token.IDENT, +// token.STRING, etc.. +func (t Token) String() string { + return fmt.Sprintf("%s %s %s", t.Pos.String(), t.Type.String(), t.Text) +} + +// HCLToken converts this token to an HCL token. +// +// The token type must be a literal type or this will panic. 
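+// For example, a JSON NULL token becomes an empty HCL STRING token, and a JSON
+// STRING token keeps its text with the JSON flag set on the resulting HCL token.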
+func (t Token) HCLToken() hcltoken.Token { + switch t.Type { + case BOOL: + return hcltoken.Token{Type: hcltoken.BOOL, Text: t.Text} + case FLOAT: + return hcltoken.Token{Type: hcltoken.FLOAT, Text: t.Text} + case NULL: + return hcltoken.Token{Type: hcltoken.STRING, Text: ""} + case NUMBER: + return hcltoken.Token{Type: hcltoken.NUMBER, Text: t.Text} + case STRING: + return hcltoken.Token{Type: hcltoken.STRING, Text: t.Text, JSON: true} + default: + panic(fmt.Sprintf("unimplemented HCLToken for type: %s", t.Type)) + } +} diff --git a/vendor/github.com/hashicorp/hcl/json/token/token_test.go b/vendor/github.com/hashicorp/hcl/json/token/token_test.go new file mode 100644 index 000000000..a83fdd55b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/token/token_test.go @@ -0,0 +1,34 @@ +package token + +import ( + "testing" +) + +func TestTypeString(t *testing.T) { + var tokens = []struct { + tt Type + str string + }{ + {ILLEGAL, "ILLEGAL"}, + {EOF, "EOF"}, + {NUMBER, "NUMBER"}, + {FLOAT, "FLOAT"}, + {BOOL, "BOOL"}, + {STRING, "STRING"}, + {NULL, "NULL"}, + {LBRACK, "LBRACK"}, + {LBRACE, "LBRACE"}, + {COMMA, "COMMA"}, + {PERIOD, "PERIOD"}, + {RBRACK, "RBRACK"}, + {RBRACE, "RBRACE"}, + } + + for _, token := range tokens { + if token.tt.String() != token.str { + t.Errorf("want: %q got:%q\n", token.str, token.tt) + + } + } + +} diff --git a/vendor/github.com/hashicorp/hcl/lex.go b/vendor/github.com/hashicorp/hcl/lex.go new file mode 100644 index 000000000..d9993c292 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/lex.go @@ -0,0 +1,38 @@ +package hcl + +import ( + "unicode" + "unicode/utf8" +) + +type lexModeValue byte + +const ( + lexModeUnknown lexModeValue = iota + lexModeHcl + lexModeJson +) + +// lexMode returns whether we're going to be parsing in JSON +// mode or HCL mode. +func lexMode(v []byte) lexModeValue { + var ( + r rune + w int + offset int + ) + + for { + r, w = utf8.DecodeRune(v[offset:]) + offset += w + if unicode.IsSpace(r) { + continue + } + if r == '{' { + return lexModeJson + } + break + } + + return lexModeHcl +} diff --git a/vendor/github.com/hashicorp/hcl/lex_test.go b/vendor/github.com/hashicorp/hcl/lex_test.go new file mode 100644 index 000000000..806276444 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/lex_test.go @@ -0,0 +1,37 @@ +package hcl + +import ( + "testing" +) + +func TestLexMode(t *testing.T) { + cases := []struct { + Input string + Mode lexModeValue + }{ + { + "", + lexModeHcl, + }, + { + "foo", + lexModeHcl, + }, + { + "{}", + lexModeJson, + }, + { + " {}", + lexModeJson, + }, + } + + for i, tc := range cases { + actual := lexMode([]byte(tc.Input)) + + if actual != tc.Mode { + t.Fatalf("%d: %#v", i, actual) + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/parse.go b/vendor/github.com/hashicorp/hcl/parse.go new file mode 100644 index 000000000..1fca53c4c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/parse.go @@ -0,0 +1,39 @@ +package hcl + +import ( + "fmt" + + "github.com/hashicorp/hcl/hcl/ast" + hclParser "github.com/hashicorp/hcl/hcl/parser" + jsonParser "github.com/hashicorp/hcl/json/parser" +) + +// ParseBytes accepts as input byte slice and returns ast tree. +// +// Input can be either JSON or HCL +func ParseBytes(in []byte) (*ast.File, error) { + return parse(in) +} + +// ParseString accepts input as a string and returns ast tree. 
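+// A minimal usage sketch (hypothetical caller, error handling elided):
+//
+//	f, _ := hcl.ParseString(`foo = "bar"`)    // first non-space byte is not '{', parsed as HCL
+//	f, _ = hcl.ParseString(`{"foo": "bar"}`)  // first non-space byte is '{', parsed as JSON
+//
+// Both calls return the same *ast.File type, so callers can treat the two formats uniformly.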
+func ParseString(input string) (*ast.File, error) { + return parse([]byte(input)) +} + +func parse(in []byte) (*ast.File, error) { + switch lexMode(in) { + case lexModeHcl: + return hclParser.Parse(in) + case lexModeJson: + return jsonParser.Parse(in) + } + + return nil, fmt.Errorf("unknown config format") +} + +// Parse parses the given input and returns the root object. +// +// The input format can be either HCL or JSON. +func Parse(input string) (*ast.File, error) { + return parse([]byte(input)) +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/assign_deep.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/assign_deep.hcl new file mode 100644 index 000000000..dd3151cb7 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/assign_deep.hcl @@ -0,0 +1,5 @@ +resource = [{ + foo = [{ + bar = {} + }] +}] diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/basic.hcl new file mode 100644 index 000000000..949994487 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic.hcl @@ -0,0 +1,2 @@ +foo = "bar" +bar = "${file("bing/bong.txt")}" diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic.json b/vendor/github.com/hashicorp/hcl/test-fixtures/basic.json new file mode 100644 index 000000000..7bdddc84b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic.json @@ -0,0 +1,4 @@ +{ + "foo": "bar", + "bar": "${file(\"bing/bong.txt\")}" +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic_int_string.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_int_string.hcl new file mode 100644 index 000000000..4e415da20 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_int_string.hcl @@ -0,0 +1 @@ +count = "3" diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/basic_squish.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_squish.hcl new file mode 100644 index 000000000..363697b49 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/basic_squish.hcl @@ -0,0 +1,3 @@ +foo="bar" +bar="${file("bing/bong.txt")}" +foo-bar="baz" diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/block_assign.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/block_assign.hcl new file mode 100644 index 000000000..ee8b06fe3 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/block_assign.hcl @@ -0,0 +1,2 @@ +environment = "aws" { +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/decode_policy.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_policy.hcl new file mode 100644 index 000000000..5b185cc91 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_policy.hcl @@ -0,0 +1,15 @@ +key "" { + policy = "read" +} + +key "foo/" { + policy = "write" +} + +key "foo/bar/" { + policy = "read" +} + +key "foo/bar/baz" { + policy = "deny" +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/decode_policy.json b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_policy.json new file mode 100644 index 000000000..151864ee8 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_policy.json @@ -0,0 +1,19 @@ +{ + "key": { + "": { + "policy": "read" + }, + + "foo/": { + "policy": "write" + }, + + "foo/bar/": { + "policy": "read" + }, + + "foo/bar/baz": { + "policy": "deny" + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/decode_tf_variable.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_tf_variable.hcl new file mode 100644 index 
000000000..52dcaa1bc --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_tf_variable.hcl @@ -0,0 +1,10 @@ +variable "foo" { + default = "bar" + description = "bar" +} + +variable "amis" { + default = { + east = "foo" + } +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/decode_tf_variable.json b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_tf_variable.json new file mode 100644 index 000000000..49f921ed0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/decode_tf_variable.json @@ -0,0 +1,14 @@ +{ + "variable": { + "foo": { + "default": "bar", + "description": "bar" + }, + + "amis": { + "default": { + "east": "foo" + } + } + } +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/empty.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/empty.hcl new file mode 100644 index 000000000..5be1b2315 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/empty.hcl @@ -0,0 +1 @@ +resource "foo" {} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/escape.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/escape.hcl new file mode 100644 index 000000000..f818b15e0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/escape.hcl @@ -0,0 +1,6 @@ +foo = "bar\"baz\\n" +bar = "new\nline" +qux = "back\\slash" +qax = "slash\\:colon" +nested = "${HH\\:mm\\:ss}" +nestedquotes = "${"\"stringwrappedinquotes\""}" diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/escape_backslash.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/escape_backslash.hcl new file mode 100644 index 000000000..bc337fb7c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/escape_backslash.hcl @@ -0,0 +1,5 @@ +output { + one = "${replace(var.sub_domain, ".", "\\.")}" + two = "${replace(var.sub_domain, ".", "\\\\.")}" + many = "${replace(var.sub_domain, ".", "\\\\\\\\.")}" +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/flat.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/flat.hcl new file mode 100644 index 000000000..9bca551f8 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/flat.hcl @@ -0,0 +1,2 @@ +foo = "bar" +Key = 7 diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl new file mode 100644 index 000000000..edf355e38 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/float.hcl @@ -0,0 +1,2 @@ +a = 1.02 +b = 2 diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/float.json b/vendor/github.com/hashicorp/hcl/test-fixtures/float.json new file mode 100644 index 000000000..580868043 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/float.json @@ -0,0 +1,4 @@ +{ + "a": 1.02, + "b": 2 +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/git_crypt.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/git_crypt.hcl new file mode 100644 index 000000000..f691948e1 Binary files /dev/null and b/vendor/github.com/hashicorp/hcl/test-fixtures/git_crypt.hcl differ diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/interpolate.json b/vendor/github.com/hashicorp/hcl/test-fixtures/interpolate.json new file mode 100644 index 000000000..cad015198 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/interpolate.json @@ -0,0 +1,3 @@ +{ + "default": "${replace(\"europe-west\", \"-\", \" \")}" +} diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/list_of_lists.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/list_of_lists.hcl new file mode 100644 index 
000000000..8af345849 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/list_of_lists.hcl @@ -0,0 +1,2 @@ +foo = [["foo"], ["bar"]] + diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/list_of_maps.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/list_of_maps.hcl new file mode 100644 index 000000000..985a33bae --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/list_of_maps.hcl @@ -0,0 +1,4 @@ +foo = [ + {somekey1 = "someval1"}, + {somekey2 = "someval2", someextrakey = "someextraval"}, +] diff --git a/vendor/github.com/hashicorp/hcl/test-fixtures/multiline.hcl b/vendor/github.com/hashicorp/hcl/test-fixtures/multiline.hcl new file mode 100644 index 000000000..f883bd707 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/test-fixtures/multiline.hcl @@ -0,0 +1,4 @@ +foo = < 0 { + pass = pass[:l-1] + fmt.Fprint(w, string(bs)) + } + } else if v == 13 || v == 10 { + break + } else if v == 3 { + err = ErrInterrupted + break + } else if v != 0 { + pass = append(pass, v) + fmt.Fprint(w, string(mask)) + } + } + + if counter > maxLength { + err = ErrMaxLengthExceeded + } + + return pass, err +} + +// GetPasswd returns the password read from the terminal without echoing input. +// The returned byte array does not include end-of-line characters. +func GetPasswd() ([]byte, error) { + return getPasswd("", false, os.Stdin, os.Stdout) +} + +// GetPasswdMasked returns the password read from the terminal, echoing asterisks. +// The returned byte array does not include end-of-line characters. +func GetPasswdMasked() ([]byte, error) { + return getPasswd("", true, os.Stdin, os.Stdout) +} + +// GetPasswdPrompt prompts the user and returns the password read from the terminal. +// If mask is true, then asterisks are echoed. +// The returned byte array does not include end-of-line characters. +func GetPasswdPrompt(prompt string, mask bool, r FdReader, w io.Writer) ([]byte, error) { + return getPasswd(prompt, mask, r, w) +} diff --git a/vendor/github.com/howeyc/gopass/pass_test.go b/vendor/github.com/howeyc/gopass/pass_test.go new file mode 100644 index 000000000..7ac315135 --- /dev/null +++ b/vendor/github.com/howeyc/gopass/pass_test.go @@ -0,0 +1,225 @@ +package gopass + +import ( + "bufio" + "bytes" + "fmt" + "io" + "io/ioutil" + "os" + "testing" + "time" +) + +// TestGetPasswd tests the password creation and output based on a byte buffer +// as input to mock the underlying getch() methods. +func TestGetPasswd(t *testing.T) { + type testData struct { + input []byte + + // Due to how backspaces are written, it is easier to manually write + // each expected output for the masked cases. 
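+	// Typing "abc" followed by a backspace, for instance, is expected to mask as
+	// "***\b \b" and to yield the password "ab".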
+ masked string + password string + byesLeft int + reason string + } + + ds := []testData{ + testData{[]byte("abc\n"), "***", "abc", 0, "Password parsing should stop at \\n"}, + testData{[]byte("abc\r"), "***", "abc", 0, "Password parsing should stop at \\r"}, + testData{[]byte("a\nbc\n"), "*", "a", 3, "Password parsing should stop at \\n"}, + testData{[]byte("*!]|\n"), "****", "*!]|", 0, "Special characters shouldn't affect the password."}, + + testData{[]byte("abc\r\n"), "***", "abc", 1, + "Password parsing should stop at \\r; Windows LINE_MODE should be unset so \\r is not converted to \\r\\n."}, + + testData{[]byte{'a', 'b', 'c', 8, '\n'}, "***\b \b", "ab", 0, "Backspace byte should remove the last read byte."}, + testData{[]byte{'a', 'b', 127, 'c', '\n'}, "**\b \b*", "ac", 0, "Delete byte should remove the last read byte."}, + testData{[]byte{'a', 'b', 127, 'c', 8, 127, '\n'}, "**\b \b*\b \b\b \b", "", 0, "Successive deletes continue to delete."}, + testData{[]byte{8, 8, 8, '\n'}, "", "", 0, "Deletes before characters are noops."}, + testData{[]byte{8, 8, 8, 'a', 'b', 'c', '\n'}, "***", "abc", 0, "Deletes before characters are noops."}, + + testData{[]byte{'a', 'b', 0, 'c', '\n'}, "***", "abc", 0, + "Nil byte should be ignored due; may get unintended nil bytes from syscalls on Windows."}, + } + + // Redirecting output for tests as they print to os.Stdout but we want to + // capture and test the output. + for _, masked := range []bool{true, false} { + for _, d := range ds { + pipeBytesToStdin(d.input) + + r, w, err := os.Pipe() + if err != nil { + t.Fatal(err.Error()) + } + + result, err := getPasswd("", masked, os.Stdin, w) + if err != nil { + t.Errorf("Error getting password: %s", err.Error()) + } + leftOnBuffer := flushStdin() + + // Test output (masked and unmasked). Delete/backspace actually + // deletes, overwrites and deletes again. As a result, we need to + // remove those from the pipe afterwards to mimic the console's + // interpretation of those bytes. + w.Close() + output, err := ioutil.ReadAll(r) + if err != nil { + t.Fatal(err.Error()) + } + var expectedOutput []byte + if masked { + expectedOutput = []byte(d.masked) + } else { + expectedOutput = []byte("") + } + if bytes.Compare(expectedOutput, output) != 0 { + t.Errorf("Expected output to equal %v (%q) but got %v (%q) instead when masked=%v. %s", expectedOutput, string(expectedOutput), output, string(output), masked, d.reason) + } + + if string(result) != d.password { + t.Errorf("Expected %q but got %q instead when masked=%v. %s", d.password, result, masked, d.reason) + } + + if leftOnBuffer != d.byesLeft { + t.Errorf("Expected %v bytes left on buffer but instead got %v when masked=%v. %s", d.byesLeft, leftOnBuffer, masked, d.reason) + } + } + } +} + +// TestPipe ensures we get our expected pipe behavior. 
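+// Every case feeds the same password with a different line ending; each should
+// yield "abc", and the unterminated case should additionally surface io.EOF.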
+func TestPipe(t *testing.T) { + type testData struct { + input string + password string + expError error + } + ds := []testData{ + testData{"abc", "abc", io.EOF}, + testData{"abc\n", "abc", nil}, + testData{"abc\r", "abc", nil}, + testData{"abc\r\n", "abc", nil}, + } + + for _, d := range ds { + _, err := pipeToStdin(d.input) + if err != nil { + t.Log("Error writing input to stdin:", err) + t.FailNow() + } + pass, err := GetPasswd() + if string(pass) != d.password { + t.Errorf("Expected %q but got %q instead.", d.password, string(pass)) + } + if err != d.expError { + t.Errorf("Expected %v but got %q instead.", d.expError, err) + } + } +} + +// flushStdin reads from stdin for .5 seconds to ensure no bytes are left on +// the buffer. Returns the number of bytes read. +func flushStdin() int { + ch := make(chan byte) + go func(ch chan byte) { + reader := bufio.NewReader(os.Stdin) + for { + b, err := reader.ReadByte() + if err != nil { // Maybe log non io.EOF errors, if you want + close(ch) + return + } + ch <- b + } + close(ch) + }(ch) + + numBytes := 0 + for { + select { + case _, ok := <-ch: + if !ok { + return numBytes + } + numBytes++ + case <-time.After(500 * time.Millisecond): + return numBytes + } + } + return numBytes +} + +// pipeToStdin pipes the given string onto os.Stdin by replacing it with an +// os.Pipe. The write end of the pipe is closed so that EOF is read after the +// final byte. +func pipeToStdin(s string) (int, error) { + pipeReader, pipeWriter, err := os.Pipe() + if err != nil { + fmt.Println("Error getting os pipes:", err) + os.Exit(1) + } + os.Stdin = pipeReader + w, err := pipeWriter.WriteString(s) + pipeWriter.Close() + return w, err +} + +func pipeBytesToStdin(b []byte) (int, error) { + return pipeToStdin(string(b)) +} + +// TestGetPasswd_Err tests errors are properly handled from getch() +func TestGetPasswd_Err(t *testing.T) { + var inBuffer *bytes.Buffer + getch = func(io.Reader) (byte, error) { + b, err := inBuffer.ReadByte() + if err != nil { + return 13, err + } + if b == 'z' { + return 'z', fmt.Errorf("Forced error; byte returned should not be considered accurate.") + } + return b, nil + } + defer func() { getch = defaultGetCh }() + + for input, expectedPassword := range map[string]string{"abc": "abc", "abzc": "ab"} { + inBuffer = bytes.NewBufferString(input) + p, err := GetPasswdMasked() + if string(p) != expectedPassword { + t.Errorf("Expected %q but got %q instead.", expectedPassword, p) + } + if err == nil { + t.Errorf("Expected error to be returned.") + } + } +} + +func TestMaxPasswordLength(t *testing.T) { + type testData struct { + input []byte + expectedErr error + + // Helper field to output in case of failure; rather than hundreds of + // bytes. 
+ inputDesc string + } + + ds := []testData{ + testData{append(bytes.Repeat([]byte{'a'}, maxLength), '\n'), nil, fmt.Sprintf("%v 'a' bytes followed by a newline", maxLength)}, + testData{append(bytes.Repeat([]byte{'a'}, maxLength+1), '\n'), ErrMaxLengthExceeded, fmt.Sprintf("%v 'a' bytes followed by a newline", maxLength+1)}, + testData{append(bytes.Repeat([]byte{0x00}, maxLength+1), '\n'), ErrMaxLengthExceeded, fmt.Sprintf("%v 0x00 bytes followed by a newline", maxLength+1)}, + } + + for _, d := range ds { + pipeBytesToStdin(d.input) + _, err := GetPasswd() + if err != d.expectedErr { + t.Errorf("Expected error to be %v; isntead got %v from %v", d.expectedErr, err, d.inputDesc) + } + } +} diff --git a/vendor/github.com/howeyc/gopass/terminal.go b/vendor/github.com/howeyc/gopass/terminal.go new file mode 100644 index 000000000..083564146 --- /dev/null +++ b/vendor/github.com/howeyc/gopass/terminal.go @@ -0,0 +1,25 @@ +// +build !solaris + +package gopass + +import "golang.org/x/crypto/ssh/terminal" + +type terminalState struct { + state *terminal.State +} + +func isTerminal(fd uintptr) bool { + return terminal.IsTerminal(int(fd)) +} + +func makeRaw(fd uintptr) (*terminalState, error) { + state, err := terminal.MakeRaw(int(fd)) + + return &terminalState{ + state: state, + }, err +} + +func restore(fd uintptr, oldState *terminalState) error { + return terminal.Restore(int(fd), oldState.state) +} diff --git a/vendor/github.com/howeyc/gopass/terminal_solaris.go b/vendor/github.com/howeyc/gopass/terminal_solaris.go new file mode 100644 index 000000000..257e1b4e8 --- /dev/null +++ b/vendor/github.com/howeyc/gopass/terminal_solaris.go @@ -0,0 +1,69 @@ +/* + * CDDL HEADER START + * + * The contents of this file are subject to the terms of the + * Common Development and Distribution License, Version 1.0 only + * (the "License"). You may not use this file except in compliance + * with the License. + * + * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE + * or http://www.opensolaris.org/os/licensing. + * See the License for the specific language governing permissions + * and limitations under the License. + * + * When distributing Covered Code, include this CDDL HEADER in each + * file and include the License file at usr/src/OPENSOLARIS.LICENSE. + * If applicable, add the following below this CDDL HEADER, with the + * fields enclosed by brackets "[]" replaced with your own identifying + * information: Portions Copyright [yyyy] [name of copyright owner] + * + * CDDL HEADER END + */ +// Below is derived from Solaris source, so CDDL license is included. + +package gopass + +import ( + "syscall" + + "golang.org/x/sys/unix" +) + +type terminalState struct { + state *unix.Termios +} + +// isTerminal returns true if there is a terminal attached to the given +// file descriptor. +// Source: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libbc/libc/gen/common/isatty.c +func isTerminal(fd uintptr) bool { + var termio unix.Termio + err := unix.IoctlSetTermio(int(fd), unix.TCGETA, &termio) + return err == nil +} + +// makeRaw puts the terminal connected to the given file descriptor into raw +// mode and returns the previous state of the terminal so that it can be +// restored. 
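+// On Solaris it does this by clearing the ECHO-related local-mode flags via
+// TCSETS, rather than going through x/crypto/ssh/terminal.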
+// Source: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libast/common/uwin/getpass.c +func makeRaw(fd uintptr) (*terminalState, error) { + oldTermiosPtr, err := unix.IoctlGetTermios(int(fd), unix.TCGETS) + if err != nil { + return nil, err + } + oldTermios := *oldTermiosPtr + + newTermios := oldTermios + newTermios.Lflag &^= syscall.ECHO | syscall.ECHOE | syscall.ECHOK | syscall.ECHONL + if err := unix.IoctlSetTermios(int(fd), unix.TCSETS, &newTermios); err != nil { + return nil, err + } + + return &terminalState{ + state: oldTermiosPtr, + }, nil +} + +func restore(fd uintptr, oldState *terminalState) error { + return unix.IoctlSetTermios(int(fd), unix.TCSETS, oldState.state) +} diff --git a/vendor/github.com/imdario/mergo/.travis.yml b/vendor/github.com/imdario/mergo/.travis.yml new file mode 100644 index 000000000..9d91c6339 --- /dev/null +++ b/vendor/github.com/imdario/mergo/.travis.yml @@ -0,0 +1,2 @@ +language: go +install: go get -t diff --git a/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md b/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..469b44907 --- /dev/null +++ b/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at i@dario.im. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/imdario/mergo/LICENSE b/vendor/github.com/imdario/mergo/LICENSE new file mode 100644 index 000000000..686680298 --- /dev/null +++ b/vendor/github.com/imdario/mergo/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2013 Dario Castañé. All rights reserved. +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/imdario/mergo/README.md b/vendor/github.com/imdario/mergo/README.md new file mode 100644 index 000000000..b13106979 --- /dev/null +++ b/vendor/github.com/imdario/mergo/README.md @@ -0,0 +1,141 @@ +# Mergo + +A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements. + +Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region Marche. + +![Mergo dall'alto](http://www.comune.mergo.an.it/Siti/Mergo/Immagini/Foto/mergo_dall_alto.jpg) + +## Status + +It is ready for production use. It works fine after extensive use in the wild. 
+ +[![Build Status][1]][2] +[![GoDoc][3]][4] +[![GoCard][5]][6] + +[1]: https://travis-ci.org/imdario/mergo.png +[2]: https://travis-ci.org/imdario/mergo +[3]: https://godoc.org/github.com/imdario/mergo?status.svg +[4]: https://godoc.org/github.com/imdario/mergo +[5]: https://goreportcard.com/badge/imdario/mergo +[6]: https://goreportcard.com/report/github.com/imdario/mergo + +### Important note + +Mergo is intended to assign **only** zero value fields on destination with source value. Since April 6th it works like this. Before it didn't work properly, causing some random overwrites. After some issues and PRs I found it didn't merge as I designed it. Thanks to [imdario/mergo#8](https://github.com/imdario/mergo/pull/8) overwriting functions were added and the wrong behavior was clearly detected. + +If you were using Mergo **before** April 6th 2015, please check your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause (I hope it won't!) in existing projects after the change (release 0.2.0). + +### Mergo in the wild + +- [docker/docker](https://github.com/docker/docker/) +- [kubernetes/kubernetes](https://github.com/kubernetes/kubernetes) +- [imdario/zas](https://github.com/imdario/zas) +- [soniah/dnsmadeeasy](https://github.com/soniah/dnsmadeeasy) +- [EagerIO/Stout](https://github.com/EagerIO/Stout) +- [lynndylanhurley/defsynth-api](https://github.com/lynndylanhurley/defsynth-api) +- [russross/canvasassignments](https://github.com/russross/canvasassignments) +- [rdegges/cryptly-api](https://github.com/rdegges/cryptly-api) +- [casualjim/exeggutor](https://github.com/casualjim/exeggutor) +- [divshot/gitling](https://github.com/divshot/gitling) +- [RWJMurphy/gorl](https://github.com/RWJMurphy/gorl) +- [andrerocker/deploy42](https://github.com/andrerocker/deploy42) +- [elwinar/rambler](https://github.com/elwinar/rambler) +- [tmaiaroto/gopartman](https://github.com/tmaiaroto/gopartman) +- [jfbus/impressionist](https://github.com/jfbus/impressionist) +- [Jmeyering/zealot](https://github.com/Jmeyering/zealot) +- [godep-migrator/rigger-host](https://github.com/godep-migrator/rigger-host) +- [Dronevery/MultiwaySwitch-Go](https://github.com/Dronevery/MultiwaySwitch-Go) +- [thoas/picfit](https://github.com/thoas/picfit) +- [mantasmatelis/whooplist-server](https://github.com/mantasmatelis/whooplist-server) +- [jnuthong/item_search](https://github.com/jnuthong/item_search) +- [Iris Web Framework](https://github.com/kataras/iris) + +## Installation + + go get github.com/imdario/mergo + + // use in your .go code + import ( + "github.com/imdario/mergo" + ) + +## Usage + +You can only merge same-type structs with exported fields initialized as zero value of their type and same-types maps. Mergo won't merge unexported (private) fields but will do recursively any exported one. Also maps will be merged recursively except for structs inside maps (because they are not addressable using Go reflection). + +```go +if err := mergo.Merge(&dst, src); err != nil { + // ... +} +``` + +Also, you can merge overwriting values using MergeWithOverwrite. + +```go +if err := mergo.MergeWithOverwrite(&dst, src); err != nil { + // ... +} +``` + +Additionally, you can map a map[string]interface{} to a struct (and otherwise, from struct to map), following the same restrictions as in Merge(). Keys are capitalized to find each corresponding exported field. 
+ +```go +if err := mergo.Map(&dst, srcMap); err != nil { + // ... +} +``` + +Warning: if you map a struct to map, it won't do it recursively. Don't expect Mergo to map struct members of your struct as map[string]interface{}. They will be just assigned as values. + +More information and examples in [godoc documentation](http://godoc.org/github.com/imdario/mergo). + +### Nice example + +```go +package main + +import ( + "fmt" + "github.com/imdario/mergo" +) + +type Foo struct { + A string + B int64 +} + +func main() { + src := Foo{ + A: "one", + B: 2, + } + + dest := Foo{ + A: "two", + } + + mergo.Merge(&dest, src) + + fmt.Println(dest) + // Will print + // {two 2} +} +``` + +Note: if test are failing due missing package, please execute: + + go get gopkg.in/yaml.v1 + +## Contact me + +If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario) + +## About + +Written by [Dario Castañé](http://dario.im). + +## License + +[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE). diff --git a/vendor/github.com/imdario/mergo/doc.go b/vendor/github.com/imdario/mergo/doc.go new file mode 100644 index 000000000..6e9aa7baf --- /dev/null +++ b/vendor/github.com/imdario/mergo/doc.go @@ -0,0 +1,44 @@ +// Copyright 2013 Dario Castañé. All rights reserved. +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package mergo merges same-type structs and maps by setting default values in zero-value fields. + +Mergo won't merge unexported (private) fields but will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection). + +Usage + +From my own work-in-progress project: + + type networkConfig struct { + Protocol string + Address string + ServerType string `json: "server_type"` + Port uint16 + } + + type FssnConfig struct { + Network networkConfig + } + + var fssnDefault = FssnConfig { + networkConfig { + "tcp", + "127.0.0.1", + "http", + 31560, + }, + } + + // Inside a function [...] + + if err := mergo.Merge(&config, fssnDefault); err != nil { + log.Fatal(err) + } + + // More code [...] 
+ +*/ +package mergo diff --git a/vendor/github.com/imdario/mergo/issue17_test.go b/vendor/github.com/imdario/mergo/issue17_test.go new file mode 100644 index 000000000..0ee96f377 --- /dev/null +++ b/vendor/github.com/imdario/mergo/issue17_test.go @@ -0,0 +1,25 @@ +package mergo + +import ( + "encoding/json" + "testing" +) + +var ( + request = `{"timestamp":null, "name": "foo"}` + maprequest = map[string]interface{}{ + "timestamp": nil, + "name": "foo", + "newStuff": "foo", + } +) + +func TestIssue17MergeWithOverwrite(t *testing.T) { + var something map[string]interface{} + if err := json.Unmarshal([]byte(request), &something); err != nil { + t.Errorf("Error while Unmarshalling maprequest %s", err) + } + if err := MergeWithOverwrite(&something, maprequest); err != nil { + t.Errorf("Error while merging %s", err) + } +} diff --git a/vendor/github.com/imdario/mergo/issue23_test.go b/vendor/github.com/imdario/mergo/issue23_test.go new file mode 100644 index 000000000..9c3258413 --- /dev/null +++ b/vendor/github.com/imdario/mergo/issue23_test.go @@ -0,0 +1,27 @@ +package mergo + +import ( + "testing" + "time" +) + +type document struct { + Created *time.Time +} + +func TestIssue23MergeWithOverwrite(t *testing.T) { + now := time.Now() + dst := document{ + &now, + } + expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC) + src := document{ + &expected, + } + if err := MergeWithOverwrite(&dst, src); err != nil { + t.Errorf("Error while merging %s", err) + } + if dst.Created != src.Created { + t.Fatalf("Created not merged in properly: dst.Created(%v) != src.Created(%v)", dst.Created, src.Created) + } +} diff --git a/vendor/github.com/imdario/mergo/issue38_test.go b/vendor/github.com/imdario/mergo/issue38_test.go new file mode 100644 index 000000000..286b68cb1 --- /dev/null +++ b/vendor/github.com/imdario/mergo/issue38_test.go @@ -0,0 +1,59 @@ +package mergo + +import ( + "testing" + "time" +) + +type structWithoutTimePointer struct { + Created time.Time +} + +func TestIssue38Merge(t *testing.T) { + dst := structWithoutTimePointer{ + time.Now(), + } + + expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC) + src := structWithoutTimePointer{ + expected, + } + if err := Merge(&dst, src); err != nil { + t.Errorf("Error while merging %s", err) + } + if dst.Created == src.Created { + t.Fatalf("Created merged unexpectedly: dst.Created(%v) == src.Created(%v)", dst.Created, src.Created) + } +} + +func TestIssue38MergeEmptyStruct(t *testing.T) { + dst := structWithoutTimePointer{} + + expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC) + src := structWithoutTimePointer{ + expected, + } + if err := Merge(&dst, src); err != nil { + t.Errorf("Error while merging %s", err) + } + if dst.Created == src.Created { + t.Fatalf("Created merged unexpectedly: dst.Created(%v) == src.Created(%v)", dst.Created, src.Created) + } +} + +func TestIssue38MergeWithOverwrite(t *testing.T) { + dst := structWithoutTimePointer{ + time.Now(), + } + + expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC) + src := structWithoutTimePointer{ + expected, + } + if err := MergeWithOverwrite(&dst, src); err != nil { + t.Errorf("Error while merging %s", err) + } + if dst.Created != src.Created { + t.Fatalf("Created not merged in properly: dst.Created(%v) != src.Created(%v)", dst.Created, src.Created) + } +} diff --git a/vendor/github.com/imdario/mergo/map.go b/vendor/github.com/imdario/mergo/map.go new file mode 100644 index 000000000..99002565f --- /dev/null +++ 
b/vendor/github.com/imdario/mergo/map.go @@ -0,0 +1,166 @@ +// Copyright 2014 Dario Castañé. All rights reserved. +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Based on src/pkg/reflect/deepequal.go from official +// golang's stdlib. + +package mergo + +import ( + "fmt" + "reflect" + "unicode" + "unicode/utf8" +) + +func changeInitialCase(s string, mapper func(rune) rune) string { + if s == "" { + return s + } + r, n := utf8.DecodeRuneInString(s) + return string(mapper(r)) + s[n:] +} + +func isExported(field reflect.StructField) bool { + r, _ := utf8.DecodeRuneInString(field.Name) + return r >= 'A' && r <= 'Z' +} + +// Traverses recursively both values, assigning src's fields values to dst. +// The map argument tracks comparisons that have already been seen, which allows +// short circuiting on recursive types. +func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, overwrite bool) (err error) { + if dst.CanAddr() { + addr := dst.UnsafeAddr() + h := 17 * addr + seen := visited[h] + typ := dst.Type() + for p := seen; p != nil; p = p.next { + if p.ptr == addr && p.typ == typ { + return nil + } + } + // Remember, remember... + visited[h] = &visit{addr, typ, seen} + } + zeroValue := reflect.Value{} + switch dst.Kind() { + case reflect.Map: + dstMap := dst.Interface().(map[string]interface{}) + for i, n := 0, src.NumField(); i < n; i++ { + srcType := src.Type() + field := srcType.Field(i) + if !isExported(field) { + continue + } + fieldName := field.Name + fieldName = changeInitialCase(fieldName, unicode.ToLower) + if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v)) || overwrite) { + dstMap[fieldName] = src.Field(i).Interface() + } + } + case reflect.Ptr: + if dst.IsNil() { + v := reflect.New(dst.Type().Elem()) + dst.Set(v) + } + dst = dst.Elem() + fallthrough + case reflect.Struct: + srcMap := src.Interface().(map[string]interface{}) + for key := range srcMap { + srcValue := srcMap[key] + fieldName := changeInitialCase(key, unicode.ToUpper) + dstElement := dst.FieldByName(fieldName) + if dstElement == zeroValue { + // We discard it because the field doesn't exist. + continue + } + srcElement := reflect.ValueOf(srcValue) + dstKind := dstElement.Kind() + srcKind := srcElement.Kind() + if srcKind == reflect.Ptr && dstKind != reflect.Ptr { + srcElement = srcElement.Elem() + srcKind = reflect.TypeOf(srcElement.Interface()).Kind() + } else if dstKind == reflect.Ptr { + // Can this work? I guess it can't. + if srcKind != reflect.Ptr && srcElement.CanAddr() { + srcPtr := srcElement.Addr() + srcElement = reflect.ValueOf(srcPtr) + srcKind = reflect.Ptr + } + } + + if !srcElement.IsValid() { + continue + } + if srcKind == dstKind { + if err = deepMerge(dstElement, srcElement, visited, depth+1, overwrite); err != nil { + return + } + } else if dstKind == reflect.Interface && dstElement.Kind() == reflect.Interface { + if err = deepMerge(dstElement, srcElement, visited, depth+1, overwrite); err != nil { + return + } + } else if srcKind == reflect.Map { + if err = deepMap(dstElement, srcElement, visited, depth+1, overwrite); err != nil { + return + } + } else { + return fmt.Errorf("type mismatch on %s field: found %v, expected %v", fieldName, srcKind, dstKind) + } + } + } + return +} + +// Map sets fields' values in dst from src. +// src can be a map with string keys or a struct. 
dst must be the opposite: +// if src is a map, dst must be a valid pointer to struct. If src is a struct, +// dst must be map[string]interface{}. +// It won't merge unexported (private) fields and will do recursively +// any exported field. +// If dst is a map, keys will be src fields' names in lower camel case. +// Missing key in src that doesn't match a field in dst will be skipped. This +// doesn't apply if dst is a map. +// This is separated method from Merge because it is cleaner and it keeps sane +// semantics: merging equal types, mapping different (restricted) types. +func Map(dst, src interface{}) error { + return _map(dst, src, false) +} + +// MapWithOverwrite will do the same as Map except that non-empty dst attributes will be overriden by +// non-empty src attribute values. +func MapWithOverwrite(dst, src interface{}) error { + return _map(dst, src, true) +} + +func _map(dst, src interface{}, overwrite bool) error { + var ( + vDst, vSrc reflect.Value + err error + ) + if vDst, vSrc, err = resolveValues(dst, src); err != nil { + return err + } + // To be friction-less, we redirect equal-type arguments + // to deepMerge. Only because arguments can be anything. + if vSrc.Kind() == vDst.Kind() { + return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, overwrite) + } + switch vSrc.Kind() { + case reflect.Struct: + if vDst.Kind() != reflect.Map { + return ErrExpectedMapAsDestination + } + case reflect.Map: + if vDst.Kind() != reflect.Struct { + return ErrExpectedStructAsDestination + } + default: + return ErrNotSupported + } + return deepMap(vDst, vSrc, make(map[uintptr]*visit), 0, overwrite) +} diff --git a/vendor/github.com/imdario/mergo/merge.go b/vendor/github.com/imdario/mergo/merge.go new file mode 100644 index 000000000..052b9fe78 --- /dev/null +++ b/vendor/github.com/imdario/mergo/merge.go @@ -0,0 +1,167 @@ +// Copyright 2013 Dario Castañé. All rights reserved. +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Based on src/pkg/reflect/deepequal.go from official +// golang's stdlib. + +package mergo + +import ( + "reflect" +) + +func hasExportedField(dst reflect.Value) (exported bool) { + for i, n := 0, dst.NumField(); i < n; i++ { + field := dst.Type().Field(i) + if field.Anonymous { + exported = exported || hasExportedField(dst.Field(i)) + } else { + exported = exported || len(field.PkgPath) == 0 + } + } + return +} + +// Traverses recursively both values, assigning src's fields values to dst. +// The map argument tracks comparisons that have already been seen, which allows +// short circuiting on recursive types. +func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, overwrite bool) (err error) { + if !src.IsValid() { + return + } + if dst.CanAddr() { + addr := dst.UnsafeAddr() + h := 17 * addr + seen := visited[h] + typ := dst.Type() + for p := seen; p != nil; p = p.next { + if p.ptr == addr && p.typ == typ { + return nil + } + } + // Remember, remember... 
+ visited[h] = &visit{addr, typ, seen} + } + switch dst.Kind() { + case reflect.Struct: + if hasExportedField(dst) { + for i, n := 0, dst.NumField(); i < n; i++ { + if err = deepMerge(dst.Field(i), src.Field(i), visited, depth+1, overwrite); err != nil { + return + } + } + } else { + if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) { + dst.Set(src) + } + } + case reflect.Map: + if len(src.MapKeys()) == 0 && !src.IsNil() && len(dst.MapKeys()) == 0 { + dst.Set(reflect.MakeMap(dst.Type())) + return + } + for _, key := range src.MapKeys() { + srcElement := src.MapIndex(key) + if !srcElement.IsValid() { + continue + } + dstElement := dst.MapIndex(key) + switch srcElement.Kind() { + case reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Slice: + if srcElement.IsNil() { + continue + } + fallthrough + default: + if !srcElement.CanInterface() { + continue + } + switch reflect.TypeOf(srcElement.Interface()).Kind() { + case reflect.Struct: + fallthrough + case reflect.Ptr: + fallthrough + case reflect.Map: + if err = deepMerge(dstElement, srcElement, visited, depth+1, overwrite); err != nil { + return + } + } + } + if dstElement.IsValid() && reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map { + continue + } + + if !isEmptyValue(srcElement) && (overwrite || (!dstElement.IsValid() || isEmptyValue(dst))) { + if dst.IsNil() { + dst.Set(reflect.MakeMap(dst.Type())) + } + dst.SetMapIndex(key, srcElement) + } + } + case reflect.Ptr: + fallthrough + case reflect.Interface: + if src.Kind() != reflect.Interface { + if dst.IsNil() || overwrite { + if dst.CanSet() && (overwrite || isEmptyValue(dst)) { + dst.Set(src) + } + } else if src.Kind() == reflect.Ptr { + if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, overwrite); err != nil { + return + } + } else if dst.Elem().Type() == src.Type() { + if err = deepMerge(dst.Elem(), src, visited, depth+1, overwrite); err != nil { + return + } + } else { + return ErrDifferentArgumentsTypes + } + break + } + if src.IsNil() { + break + } else if dst.IsNil() || overwrite { + if dst.CanSet() && (overwrite || isEmptyValue(dst)) { + dst.Set(src) + } + } else if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, overwrite); err != nil { + return + } + default: + if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) { + dst.Set(src) + } + } + return +} + +// Merge will fill any empty for value type attributes on the dst struct using corresponding +// src attributes if they themselves are not empty. dst and src must be valid same-type structs +// and dst must be a pointer to struct. +// It won't merge unexported (private) fields and will do recursively any exported field. +func Merge(dst, src interface{}) error { + return merge(dst, src, false) +} + +// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overriden by +// non-empty src attribute values. 
+func MergeWithOverwrite(dst, src interface{}) error { + return merge(dst, src, true) +} + +func merge(dst, src interface{}, overwrite bool) error { + var ( + vDst, vSrc reflect.Value + err error + ) + if vDst, vSrc, err = resolveValues(dst, src); err != nil { + return err + } + if vDst.Type() != vSrc.Type() { + return ErrDifferentArgumentsTypes + } + return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, overwrite) +} diff --git a/vendor/github.com/imdario/mergo/mergo.go b/vendor/github.com/imdario/mergo/mergo.go new file mode 100644 index 000000000..79ccdf5cb --- /dev/null +++ b/vendor/github.com/imdario/mergo/mergo.go @@ -0,0 +1,90 @@ +// Copyright 2013 Dario Castañé. All rights reserved. +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Based on src/pkg/reflect/deepequal.go from official +// golang's stdlib. + +package mergo + +import ( + "errors" + "reflect" +) + +// Errors reported by Mergo when it finds invalid arguments. +var ( + ErrNilArguments = errors.New("src and dst must not be nil") + ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type") + ErrNotSupported = errors.New("only structs and maps are supported") + ErrExpectedMapAsDestination = errors.New("dst was expected to be a map") + ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct") +) + +// During deepMerge, must keep track of checks that are +// in progress. The comparison algorithm assumes that all +// checks in progress are true when it reencounters them. +// Visited are stored in a map indexed by 17 * a1 + a2; +type visit struct { + ptr uintptr + typ reflect.Type + next *visit +} + +// From src/pkg/encoding/json. +func isEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr, reflect.Func: + return v.IsNil() + } + return false +} + +func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) { + if dst == nil || src == nil { + err = ErrNilArguments + return + } + vDst = reflect.ValueOf(dst).Elem() + if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map { + err = ErrNotSupported + return + } + vSrc = reflect.ValueOf(src) + // We check if vSrc is a pointer to dereference it. + if vSrc.Kind() == reflect.Ptr { + vSrc = vSrc.Elem() + } + return +} + +// Traverses recursively both values, assigning src's fields values to dst. +// The map argument tracks comparisons that have already been seen, which allows +// short circuiting on recursive types. +func deeper(dst, src reflect.Value, visited map[uintptr]*visit, depth int) (err error) { + if dst.CanAddr() { + addr := dst.UnsafeAddr() + h := 17 * addr + seen := visited[h] + typ := dst.Type() + for p := seen; p != nil; p = p.next { + if p.ptr == addr && p.typ == typ { + return nil + } + } + // Remember, remember... 
+ visited[h] = &visit{addr, typ, seen} + } + return // TODO refactor +} diff --git a/vendor/github.com/imdario/mergo/mergo_test.go b/vendor/github.com/imdario/mergo/mergo_test.go new file mode 100644 index 000000000..e167c332a --- /dev/null +++ b/vendor/github.com/imdario/mergo/mergo_test.go @@ -0,0 +1,662 @@ +// Copyright 2013 Dario Castañé. All rights reserved. +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mergo + +import ( + "gopkg.in/yaml.v2" + "io/ioutil" + "reflect" + "testing" + "time" +) + +type simpleTest struct { + Value int +} + +type complexTest struct { + St simpleTest + sz int + ID string +} + +type mapTest struct { + M map[int]int +} + +type ifcTest struct { + I interface{} +} + +type moreComplextText struct { + Ct complexTest + St simpleTest + Nt simpleTest +} + +type pointerTest struct { + C *simpleTest +} + +type sliceTest struct { + S []int +} + +func TestKb(t *testing.T) { + type testStruct struct { + Name string + KeyValue map[string]interface{} + } + + akv := make(map[string]interface{}) + akv["Key1"] = "not value 1" + akv["Key2"] = "value2" + a := testStruct{} + a.Name = "A" + a.KeyValue = akv + + bkv := make(map[string]interface{}) + bkv["Key1"] = "value1" + bkv["Key3"] = "value3" + b := testStruct{} + b.Name = "B" + b.KeyValue = bkv + + ekv := make(map[string]interface{}) + ekv["Key1"] = "value1" + ekv["Key2"] = "value2" + ekv["Key3"] = "value3" + expected := testStruct{} + expected.Name = "B" + expected.KeyValue = ekv + + Merge(&b, a) + + if !reflect.DeepEqual(b, expected) { + t.Errorf("Actual: %#v did not match \nExpected: %#v", b, expected) + } +} + +func TestNil(t *testing.T) { + if err := Merge(nil, nil); err != ErrNilArguments { + t.Fail() + } +} + +func TestDifferentTypes(t *testing.T) { + a := simpleTest{42} + b := 42 + if err := Merge(&a, b); err != ErrDifferentArgumentsTypes { + t.Fail() + } +} + +func TestSimpleStruct(t *testing.T) { + a := simpleTest{} + b := simpleTest{42} + if err := Merge(&a, b); err != nil { + t.FailNow() + } + if a.Value != 42 { + t.Fatalf("b not merged in properly: a.Value(%d) != b.Value(%d)", a.Value, b.Value) + } + if !reflect.DeepEqual(a, b) { + t.FailNow() + } +} + +func TestComplexStruct(t *testing.T) { + a := complexTest{} + a.ID = "athing" + b := complexTest{simpleTest{42}, 1, "bthing"} + if err := Merge(&a, b); err != nil { + t.FailNow() + } + if a.St.Value != 42 { + t.Fatalf("b not merged in properly: a.St.Value(%d) != b.St.Value(%d)", a.St.Value, b.St.Value) + } + if a.sz == 1 { + t.Fatalf("a's private field sz not preserved from merge: a.sz(%d) == b.sz(%d)", a.sz, b.sz) + } + if a.ID == b.ID { + t.Fatalf("a's field ID merged unexpectedly: a.ID(%s) == b.ID(%s)", a.ID, b.ID) + } +} + +func TestComplexStructWithOverwrite(t *testing.T) { + a := complexTest{simpleTest{1}, 1, "do-not-overwrite-with-empty-value"} + b := complexTest{simpleTest{42}, 2, ""} + + expect := complexTest{simpleTest{42}, 1, "do-not-overwrite-with-empty-value"} + if err := MergeWithOverwrite(&a, b); err != nil { + t.FailNow() + } + + if !reflect.DeepEqual(a, expect) { + t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", a, expect) + } +} + +func TestPointerStruct(t *testing.T) { + s1 := simpleTest{} + s2 := simpleTest{19} + a := pointerTest{&s1} + b := pointerTest{&s2} + if err := Merge(&a, b); err != nil { + t.FailNow() + } + if a.C.Value != b.C.Value { + t.Fatalf("b not merged in properly: a.C.Value(%d) != b.C.Value(%d)", 
a.C.Value, b.C.Value) + } +} + +type embeddingStruct struct { + embeddedStruct +} + +type embeddedStruct struct { + A string +} + +func TestEmbeddedStruct(t *testing.T) { + tests := []struct { + src embeddingStruct + dst embeddingStruct + expected embeddingStruct + }{ + { + src: embeddingStruct{ + embeddedStruct{"foo"}, + }, + dst: embeddingStruct{ + embeddedStruct{""}, + }, + expected: embeddingStruct{ + embeddedStruct{"foo"}, + }, + }, + { + src: embeddingStruct{ + embeddedStruct{""}, + }, + dst: embeddingStruct{ + embeddedStruct{"bar"}, + }, + expected: embeddingStruct{ + embeddedStruct{"bar"}, + }, + }, + { + src: embeddingStruct{ + embeddedStruct{"foo"}, + }, + dst: embeddingStruct{ + embeddedStruct{"bar"}, + }, + expected: embeddingStruct{ + embeddedStruct{"bar"}, + }, + }, + } + + for _, test := range tests { + err := Merge(&test.dst, test.src) + if err != nil { + t.Errorf("unexpected error: %v", err) + continue + } + if !reflect.DeepEqual(test.dst, test.expected) { + t.Errorf("unexpected output\nexpected:\n%+v\nsaw:\n%+v\n", test.expected, test.dst) + } + } +} + +func TestPointerStructNil(t *testing.T) { + a := pointerTest{nil} + b := pointerTest{&simpleTest{19}} + if err := Merge(&a, b); err != nil { + t.FailNow() + } + if a.C.Value != b.C.Value { + t.Fatalf("b not merged in a properly: a.C.Value(%d) != b.C.Value(%d)", a.C.Value, b.C.Value) + } +} + +func TestSliceStruct(t *testing.T) { + a := sliceTest{} + b := sliceTest{[]int{1, 2, 3}} + if err := Merge(&a, b); err != nil { + t.FailNow() + } + if len(b.S) != 3 { + t.FailNow() + } + if len(a.S) != len(b.S) { + t.Fatalf("b not merged in a proper way %d != %d", len(a.S), len(b.S)) + } + + a = sliceTest{[]int{1}} + b = sliceTest{[]int{1, 2, 3}} + if err := Merge(&a, b); err != nil { + t.FailNow() + } + if len(a.S) != 1 { + t.FailNow() + } + if len(a.S) == len(b.S) { + t.Fatalf("b merged unexpectedly %d != %d", len(a.S), len(b.S)) + } +} + +func TestEmptyMaps(t *testing.T) { + a := mapTest{} + b := mapTest{ + map[int]int{}, + } + if err := Merge(&a, b); err != nil { + t.Fail() + } + if !reflect.DeepEqual(a, b) { + t.FailNow() + } +} + +func TestEmptyToEmptyMaps(t *testing.T) { + a := mapTest{} + b := mapTest{} + if err := Merge(&a, b); err != nil { + t.Fail() + } + if !reflect.DeepEqual(a, b) { + t.FailNow() + } +} + +func TestEmptyToNotEmptyMaps(t *testing.T) { + a := mapTest{map[int]int{ + 1: 2, + 3: 4, + }} + aa := mapTest{map[int]int{ + 1: 2, + 3: 4, + }} + b := mapTest{ + map[int]int{}, + } + if err := Merge(&a, b); err != nil { + t.Fail() + } + if !reflect.DeepEqual(a, aa) { + t.FailNow() + } +} + +func TestMapsWithOverwrite(t *testing.T) { + m := map[string]simpleTest{ + "a": {}, // overwritten by 16 + "b": {42}, // not overwritten by empty value + "c": {13}, // overwritten by 12 + "d": {61}, + } + n := map[string]simpleTest{ + "a": {16}, + "b": {}, + "c": {12}, + "e": {14}, + } + expect := map[string]simpleTest{ + "a": {16}, + "b": {}, + "c": {12}, + "d": {61}, + "e": {14}, + } + + if err := MergeWithOverwrite(&m, n); err != nil { + t.Fatalf(err.Error()) + } + + if !reflect.DeepEqual(m, expect) { + t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", m, expect) + } +} + +func TestMaps(t *testing.T) { + m := map[string]simpleTest{ + "a": {}, + "b": {42}, + "c": {13}, + "d": {61}, + } + n := map[string]simpleTest{ + "a": {16}, + "b": {}, + "c": {12}, + "e": {14}, + } + expect := map[string]simpleTest{ + "a": {0}, + "b": {42}, + "c": {13}, + "d": {61}, + "e": {14}, + } + + if err := Merge(&m, n); err != nil { + 
t.Fatalf(err.Error()) + } + + if !reflect.DeepEqual(m, expect) { + t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", m, expect) + } + if m["a"].Value != 0 { + t.Fatalf(`n merged in m because I solved non-addressable map values TODO: m["a"].Value(%d) != n["a"].Value(%d)`, m["a"].Value, n["a"].Value) + } + if m["b"].Value != 42 { + t.Fatalf(`n wrongly merged in m: m["b"].Value(%d) != n["b"].Value(%d)`, m["b"].Value, n["b"].Value) + } + if m["c"].Value != 13 { + t.Fatalf(`n overwritten in m: m["c"].Value(%d) != n["c"].Value(%d)`, m["c"].Value, n["c"].Value) + } +} + +func TestYAMLMaps(t *testing.T) { + thing := loadYAML("testdata/thing.yml") + license := loadYAML("testdata/license.yml") + ft := thing["fields"].(map[interface{}]interface{}) + fl := license["fields"].(map[interface{}]interface{}) + // license has one extra field (site) and another already existing in thing (author) that Mergo won't override. + expectedLength := len(ft) + len(fl) - 1 + if err := Merge(&license, thing); err != nil { + t.Fatal(err.Error()) + } + currentLength := len(license["fields"].(map[interface{}]interface{})) + if currentLength != expectedLength { + t.Fatalf(`thing not merged in license properly, license must have %d elements instead of %d`, expectedLength, currentLength) + } + fields := license["fields"].(map[interface{}]interface{}) + if _, ok := fields["id"]; !ok { + t.Fatalf(`thing not merged in license properly, license must have a new id field from thing`) + } +} + +func TestTwoPointerValues(t *testing.T) { + a := &simpleTest{} + b := &simpleTest{42} + if err := Merge(a, b); err != nil { + t.Fatalf(`Boom. You crossed the streams: %s`, err) + } +} + +func TestMap(t *testing.T) { + a := complexTest{} + a.ID = "athing" + c := moreComplextText{a, simpleTest{}, simpleTest{}} + b := map[string]interface{}{ + "ct": map[string]interface{}{ + "st": map[string]interface{}{ + "value": 42, + }, + "sz": 1, + "id": "bthing", + }, + "st": &simpleTest{144}, // Mapping a reference + "zt": simpleTest{299}, // Mapping a missing field (zt doesn't exist) + "nt": simpleTest{3}, + } + if err := Map(&c, b); err != nil { + t.FailNow() + } + m := b["ct"].(map[string]interface{}) + n := m["st"].(map[string]interface{}) + o := b["st"].(*simpleTest) + p := b["nt"].(simpleTest) + if c.Ct.St.Value != 42 { + t.Fatalf("b not merged in properly: c.Ct.St.Value(%d) != b.Ct.St.Value(%d)", c.Ct.St.Value, n["value"]) + } + if c.St.Value != 144 { + t.Fatalf("b not merged in properly: c.St.Value(%d) != b.St.Value(%d)", c.St.Value, o.Value) + } + if c.Nt.Value != 3 { + t.Fatalf("b not merged in properly: c.Nt.Value(%d) != b.Nt.Value(%d)", c.St.Value, p.Value) + } + if c.Ct.sz == 1 { + t.Fatalf("a's private field sz not preserved from merge: c.Ct.sz(%d) == b.Ct.sz(%d)", c.Ct.sz, m["sz"]) + } + if c.Ct.ID == m["id"] { + t.Fatalf("a's field ID merged unexpectedly: c.Ct.ID(%s) == b.Ct.ID(%s)", c.Ct.ID, m["id"]) + } +} + +func TestSimpleMap(t *testing.T) { + a := simpleTest{} + b := map[string]interface{}{ + "value": 42, + } + if err := Map(&a, b); err != nil { + t.FailNow() + } + if a.Value != 42 { + t.Fatalf("b not merged in properly: a.Value(%d) != b.Value(%v)", a.Value, b["value"]) + } +} + +func TestIfcMap(t *testing.T) { + a := ifcTest{} + b := ifcTest{42} + if err := Map(&a, b); err != nil { + t.FailNow() + } + if a.I != 42 { + t.Fatalf("b not merged in properly: a.I(%d) != b.I(%d)", a.I, b.I) + } + if !reflect.DeepEqual(a, b) { + t.FailNow() + } +} + +func TestIfcMapNoOverwrite(t *testing.T) { + a := ifcTest{13} + b := ifcTest{42} + if 
err := Map(&a, b); err != nil { + t.FailNow() + } + if a.I != 13 { + t.Fatalf("a not left alone: a.I(%d) == b.I(%d)", a.I, b.I) + } +} + +func TestIfcMapWithOverwrite(t *testing.T) { + a := ifcTest{13} + b := ifcTest{42} + if err := MapWithOverwrite(&a, b); err != nil { + t.FailNow() + } + if a.I != 42 { + t.Fatalf("b not merged in properly: a.I(%d) != b.I(%d)", a.I, b.I) + } + if !reflect.DeepEqual(a, b) { + t.FailNow() + } +} + +type pointerMapTest struct { + A int + hidden int + B *simpleTest +} + +func TestBackAndForth(t *testing.T) { + pt := pointerMapTest{42, 1, &simpleTest{66}} + m := make(map[string]interface{}) + if err := Map(&m, pt); err != nil { + t.FailNow() + } + var ( + v interface{} + ok bool + ) + if v, ok = m["a"]; v.(int) != pt.A || !ok { + t.Fatalf("pt not merged in properly: m[`a`](%d) != pt.A(%d)", v, pt.A) + } + if v, ok = m["b"]; !ok { + t.Fatalf("pt not merged in properly: B is missing in m") + } + var st *simpleTest + if st = v.(*simpleTest); st.Value != 66 { + t.Fatalf("something went wrong while mapping pt on m, B wasn't copied") + } + bpt := pointerMapTest{} + if err := Map(&bpt, m); err != nil { + t.Fatal(err) + } + if bpt.A != pt.A { + t.Fatalf("pt not merged in properly: bpt.A(%d) != pt.A(%d)", bpt.A, pt.A) + } + if bpt.hidden == pt.hidden { + t.Fatalf("pt unexpectedly merged: bpt.hidden(%d) == pt.hidden(%d)", bpt.hidden, pt.hidden) + } + if bpt.B.Value != pt.B.Value { + t.Fatalf("pt not merged in properly: bpt.B.Value(%d) != pt.B.Value(%d)", bpt.B.Value, pt.B.Value) + } +} + +func TestEmbeddedPointerUnpacking(t *testing.T) { + tests := []struct{ input pointerMapTest }{ + {pointerMapTest{42, 1, nil}}, + {pointerMapTest{42, 1, &simpleTest{66}}}, + } + newValue := 77 + m := map[string]interface{}{ + "b": map[string]interface{}{ + "value": newValue, + }, + } + for _, test := range tests { + pt := test.input + if err := MapWithOverwrite(&pt, m); err != nil { + t.FailNow() + } + if pt.B.Value != newValue { + t.Fatalf("pt not mapped properly: pt.A.Value(%d) != m[`b`][`value`](%d)", pt.B.Value, newValue) + } + + } +} + +type structWithTimePointer struct { + Birth *time.Time +} + +func TestTime(t *testing.T) { + now := time.Now() + dataStruct := structWithTimePointer{ + Birth: &now, + } + dataMap := map[string]interface{}{ + "Birth": &now, + } + b := structWithTimePointer{} + if err := Merge(&b, dataStruct); err != nil { + t.FailNow() + } + if b.Birth.IsZero() { + t.Fatalf("time.Time not merged in properly: b.Birth(%v) != dataStruct['Birth'](%v)", b.Birth, dataStruct.Birth) + } + if b.Birth != dataStruct.Birth { + t.Fatalf("time.Time not merged in properly: b.Birth(%v) != dataStruct['Birth'](%v)", b.Birth, dataStruct.Birth) + } + b = structWithTimePointer{} + if err := Map(&b, dataMap); err != nil { + t.FailNow() + } + if b.Birth.IsZero() { + t.Fatalf("time.Time not merged in properly: b.Birth(%v) != dataMap['Birth'](%v)", b.Birth, dataMap["Birth"]) + } +} + +type simpleNested struct { + A int +} + +type structWithNestedPtrValueMap struct { + NestedPtrValue map[string]*simpleNested +} + +func TestNestedPtrValueInMap(t *testing.T) { + src := &structWithNestedPtrValueMap{ + NestedPtrValue: map[string]*simpleNested{ + "x": { + A: 1, + }, + }, + } + dst := &structWithNestedPtrValueMap{ + NestedPtrValue: map[string]*simpleNested{ + "x": {}, + }, + } + if err := Map(dst, src); err != nil { + t.FailNow() + } + if dst.NestedPtrValue["x"].A == 0 { + t.Fatalf("Nested Ptr value not merged in properly: dst.NestedPtrValue[\"x\"].A(%v) != src.NestedPtrValue[\"x\"].A(%v)", 
dst.NestedPtrValue["x"].A, src.NestedPtrValue["x"].A) + } +} + +func loadYAML(path string) (m map[string]interface{}) { + m = make(map[string]interface{}) + raw, _ := ioutil.ReadFile(path) + _ = yaml.Unmarshal(raw, &m) + return +} + +type structWithMap struct { + m map[string]structWithUnexportedProperty +} + +type structWithUnexportedProperty struct { + s string +} + +func TestUnexportedProperty(t *testing.T) { + a := structWithMap{map[string]structWithUnexportedProperty{ + "key": structWithUnexportedProperty{"hello"}, + }} + b := structWithMap{map[string]structWithUnexportedProperty{ + "key": structWithUnexportedProperty{"hi"}, + }} + defer func() { + if r := recover(); r != nil { + t.Errorf("Should not have panicked") + } + }() + Merge(&a, b) +} + +type structWithBoolPointer struct { + C *bool +} + +func TestBooleanPointer(t *testing.T) { + bt, bf := true, false + src := structWithBoolPointer{ + &bt, + } + dst := structWithBoolPointer{ + &bf, + } + if err := Merge(&dst, src); err != nil { + t.FailNow() + } + if dst.C == src.C { + t.Fatalf("dst.C should be a different pointer than src.C") + } + if *dst.C != *src.C { + t.Fatalf("dst.C should be true") + } +} diff --git a/vendor/github.com/imdario/mergo/testdata/license.yml b/vendor/github.com/imdario/mergo/testdata/license.yml new file mode 100644 index 000000000..2f1ad0082 --- /dev/null +++ b/vendor/github.com/imdario/mergo/testdata/license.yml @@ -0,0 +1,4 @@ +import: ../../../../fossene/db/schema/thing.yml +fields: + site: string + author: root diff --git a/vendor/github.com/imdario/mergo/testdata/thing.yml b/vendor/github.com/imdario/mergo/testdata/thing.yml new file mode 100644 index 000000000..1a7104125 --- /dev/null +++ b/vendor/github.com/imdario/mergo/testdata/thing.yml @@ -0,0 +1,6 @@ +fields: + id: int + name: string + parent: ref "datu:thing" + status: enum(draft, public, private) + author: updater diff --git a/vendor/github.com/inconshreveable/mousetrap/LICENSE b/vendor/github.com/inconshreveable/mousetrap/LICENSE new file mode 100644 index 000000000..5f0d1fb6a --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/LICENSE @@ -0,0 +1,13 @@ +Copyright 2014 Alan Shreve + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/github.com/inconshreveable/mousetrap/README.md b/vendor/github.com/inconshreveable/mousetrap/README.md new file mode 100644 index 000000000..7a950d177 --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/README.md @@ -0,0 +1,23 @@ +# mousetrap + +mousetrap is a tiny library that answers a single question. + +On a Windows machine, was the process invoked by someone double clicking on +the executable file while browsing in explorer? + +### Motivation + +Windows developers unfamiliar with command line tools will often "double-click" +the executable for a tool. Because most CLI tools print the help and then exit +when invoked without arguments, this is often very frustrating for those users. 
+ +mousetrap provides a way to detect these invocations so that you can provide +more helpful behavior and instructions on how to run the CLI tool. To see what +this looks like, both from an organizational and a technical perspective, see +https://inconshreveable.com/09-09-2014/sweat-the-small-stuff/ + +### The interface + +The library exposes a single interface: + + func StartedByExplorer() (bool) diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_others.go b/vendor/github.com/inconshreveable/mousetrap/trap_others.go new file mode 100644 index 000000000..9d2d8a4ba --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_others.go @@ -0,0 +1,15 @@ +// +build !windows + +package mousetrap + +// StartedByExplorer returns true if the program was invoked by the user +// double-clicking on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. It only can tell you +// whether it was launched from explorer.exe +// +// On non-Windows platforms, it always returns false. +func StartedByExplorer() bool { + return false +} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go new file mode 100644 index 000000000..336142a5e --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go @@ -0,0 +1,98 @@ +// +build windows +// +build !go1.4 + +package mousetrap + +import ( + "fmt" + "os" + "syscall" + "unsafe" +) + +const ( + // defined by the Win32 API + th32cs_snapprocess uintptr = 0x2 +) + +var ( + kernel = syscall.MustLoadDLL("kernel32.dll") + CreateToolhelp32Snapshot = kernel.MustFindProc("CreateToolhelp32Snapshot") + Process32First = kernel.MustFindProc("Process32FirstW") + Process32Next = kernel.MustFindProc("Process32NextW") +) + +// ProcessEntry32 structure defined by the Win32 API +type processEntry32 struct { + dwSize uint32 + cntUsage uint32 + th32ProcessID uint32 + th32DefaultHeapID int + th32ModuleID uint32 + cntThreads uint32 + th32ParentProcessID uint32 + pcPriClassBase int32 + dwFlags uint32 + szExeFile [syscall.MAX_PATH]uint16 +} + +func getProcessEntry(pid int) (pe *processEntry32, err error) { + snapshot, _, e1 := CreateToolhelp32Snapshot.Call(th32cs_snapprocess, uintptr(0)) + if snapshot == uintptr(syscall.InvalidHandle) { + err = fmt.Errorf("CreateToolhelp32Snapshot: %v", e1) + return + } + defer syscall.CloseHandle(syscall.Handle(snapshot)) + + var processEntry processEntry32 + processEntry.dwSize = uint32(unsafe.Sizeof(processEntry)) + ok, _, e1 := Process32First.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) + if ok == 0 { + err = fmt.Errorf("Process32First: %v", e1) + return + } + + for { + if processEntry.th32ProcessID == uint32(pid) { + pe = &processEntry + return + } + + ok, _, e1 = Process32Next.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) + if ok == 0 { + err = fmt.Errorf("Process32Next: %v", e1) + return + } + } +} + +func getppid() (pid int, err error) { + pe, err := getProcessEntry(os.Getpid()) + if err != nil { + return + } + + pid = int(pe.th32ParentProcessID) + return +} + +// StartedByExplorer returns true if the program was invoked by the user double-clicking +// on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. 
It only can tell you +// whether it was launched from explorer.exe +func StartedByExplorer() bool { + ppid, err := getppid() + if err != nil { + return false + } + + pe, err := getProcessEntry(ppid) + if err != nil { + return false + } + + name := syscall.UTF16ToString(pe.szExeFile[:]) + return name == "explorer.exe" +} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go new file mode 100644 index 000000000..9a28e57c3 --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go @@ -0,0 +1,46 @@ +// +build windows +// +build go1.4 + +package mousetrap + +import ( + "os" + "syscall" + "unsafe" +) + +func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { + snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) + if err != nil { + return nil, err + } + defer syscall.CloseHandle(snapshot) + var procEntry syscall.ProcessEntry32 + procEntry.Size = uint32(unsafe.Sizeof(procEntry)) + if err = syscall.Process32First(snapshot, &procEntry); err != nil { + return nil, err + } + for { + if procEntry.ProcessID == uint32(pid) { + return &procEntry, nil + } + err = syscall.Process32Next(snapshot, &procEntry) + if err != nil { + return nil, err + } + } +} + +// StartedByExplorer returns true if the program was invoked by the user double-clicking +// on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. It only can tell you +// whether it was launched from explorer.exe +func StartedByExplorer() bool { + pe, err := getProcessEntry(os.Getppid()) + if err != nil { + return false + } + return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:]) +} diff --git a/vendor/github.com/json-iterator/go/.codecov.yml b/vendor/github.com/json-iterator/go/.codecov.yml new file mode 100644 index 000000000..955dc0be5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/.codecov.yml @@ -0,0 +1,3 @@ +ignore: + - "output_tests/.*" + diff --git a/vendor/github.com/json-iterator/go/.gitignore b/vendor/github.com/json-iterator/go/.gitignore new file mode 100644 index 000000000..ce242daf7 --- /dev/null +++ b/vendor/github.com/json-iterator/go/.gitignore @@ -0,0 +1,4 @@ +.idea +/coverage.txt +/profile.out +/bug_test.go diff --git a/vendor/github.com/json-iterator/go/.travis.yml b/vendor/github.com/json-iterator/go/.travis.yml new file mode 100644 index 000000000..449e67cd0 --- /dev/null +++ b/vendor/github.com/json-iterator/go/.travis.yml @@ -0,0 +1,14 @@ +language: go + +go: + - 1.8.x + - 1.x + +before_install: + - go get -t -v ./... 
+ +script: + - ./test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE new file mode 100644 index 000000000..2cf4f5ab2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 json-iterator + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md new file mode 100644 index 000000000..3a0d68098 --- /dev/null +++ b/vendor/github.com/json-iterator/go/README.md @@ -0,0 +1,86 @@ +[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge) +[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/json-iterator/go) +[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go) +[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go) +[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go) +[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE) +[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) + +A high-performance 100% compatible drop-in replacement of "encoding/json" + +``` +Go开发者们请加入我们,滴滴出行平台技术部 taowen@didichuxing.com +``` + +# Benchmark + +![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png) + +Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go + +Raw Result (easyjson requires static code generation) + +| | ns/op | allocation bytes | allocation times | +| --- | --- | --- | --- | +| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op | +| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op | +| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op | +| std encode | 2213 ns/op | 712 B/op | 5 allocs/op | +| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op | +| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op | + +# Usage + +100% compatibility with standard lib + +Replace + +```go +import "encoding/json" +json.Marshal(&data) +``` + +with + +```go +import 
"github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Marshal(&data) +``` + +Replace + +```go +import "encoding/json" +json.Unmarshal(input, &data) +``` + +with + +```go +import "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Unmarshal(input, &data) +``` + +[More documentation](http://jsoniter.com/migrate-from-go-std.html) + +# How to get + +``` +go get github.com/json-iterator/go +``` + +# Contribution Welcomed ! + +Contributors + +* [thockin](https://github.com/thockin) +* [mattn](https://github.com/mattn) +* [cch123](https://github.com/cch123) +* [Oleg Shaldybin](https://github.com/olegshaldybin) +* [Jason Toffaletti](https://github.com/toffaletti) + +Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) diff --git a/vendor/github.com/json-iterator/go/compatible_test.go b/vendor/github.com/json-iterator/go/compatible_test.go new file mode 100644 index 000000000..4b725c536 --- /dev/null +++ b/vendor/github.com/json-iterator/go/compatible_test.go @@ -0,0 +1,40 @@ +package jsoniter + +import ( + "bytes" + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +// Standard Encoder has trailing newline. +func TestEncoderHasTrailingNewline(t *testing.T) { + should := require.New(t) + var buf, stdbuf bytes.Buffer + enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf) + enc.Encode(1) + stdenc := json.NewEncoder(&stdbuf) + stdenc.Encode(1) + should.Equal(stdbuf.Bytes(), buf.Bytes()) +} + +// Non-nil but empty map should be ignored. +func TestOmitempty(t *testing.T) { + o := struct { + A string `json:"a,omitempty"` + B string `json:"b,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` + }{ + A: "a", + B: "b", + Annotations: map[string]string{}, + } + should := require.New(t) + var buf, stdbuf bytes.Buffer + enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf) + enc.Encode(o) + stdenc := json.NewEncoder(&stdbuf) + stdenc.Encode(o) + should.Equal(string(stdbuf.Bytes()), string(buf.Bytes())) +} diff --git a/vendor/github.com/json-iterator/go/example_test.go b/vendor/github.com/json-iterator/go/example_test.go new file mode 100644 index 000000000..1c8f341c1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/example_test.go @@ -0,0 +1,95 @@ +package jsoniter + +import ( + "fmt" + "os" +) + +func ExampleMarshal() { + type ColorGroup struct { + ID int + Name string + Colors []string + } + group := ColorGroup{ + ID: 1, + Name: "Reds", + Colors: []string{"Crimson", "Red", "Ruby", "Maroon"}, + } + b, err := Marshal(group) + if err != nil { + fmt.Println("error:", err) + } + os.Stdout.Write(b) + // Output: + // {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]} +} + +func ExampleUnmarshal() { + var jsonBlob = []byte(`[ + {"Name": "Platypus", "Order": "Monotremata"}, + {"Name": "Quoll", "Order": "Dasyuromorphia"} + ]`) + type Animal struct { + Name string + Order string + } + var animals []Animal + err := Unmarshal(jsonBlob, &animals) + if err != nil { + fmt.Println("error:", err) + } + fmt.Printf("%+v", animals) + // Output: + // [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}] +} + +func ExampleConfigFastest_Marshal() { + type ColorGroup struct { + ID int + Name string + Colors []string + } + group := ColorGroup{ + ID: 1, + Name: "Reds", + Colors: []string{"Crimson", "Red", "Ruby", "Maroon"}, + } + stream := 
ConfigFastest.BorrowStream(nil) + defer ConfigFastest.ReturnStream(stream) + stream.WriteVal(group) + if stream.Error != nil { + fmt.Println("error:", stream.Error) + } + os.Stdout.Write(stream.Buffer()) + // Output: + // {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]} +} + +func ExampleConfigFastest_Unmarshal() { + var jsonBlob = []byte(`[ + {"Name": "Platypus", "Order": "Monotremata"}, + {"Name": "Quoll", "Order": "Dasyuromorphia"} + ]`) + type Animal struct { + Name string + Order string + } + var animals []Animal + iter := ConfigFastest.BorrowIterator(jsonBlob) + defer ConfigFastest.ReturnIterator(iter) + iter.ReadVal(&animals) + if iter.Error != nil { + fmt.Println("error:", iter.Error) + } + fmt.Printf("%+v", animals) + // Output: + // [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}] +} + +func ExampleGet() { + val := []byte(`{"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}`) + fmt.Printf(Get(val, "Colors", 0).ToString()) + // Output: + // Crimson +} diff --git a/vendor/github.com/json-iterator/go/extra/fuzzy_decoder.go b/vendor/github.com/json-iterator/go/extra/fuzzy_decoder.go new file mode 100644 index 000000000..9b63c277f --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/fuzzy_decoder.go @@ -0,0 +1,278 @@ +package extra + +import ( + "encoding/json" + "io" + "math" + "reflect" + "strings" + "unsafe" + + "github.com/json-iterator/go" +) + +const maxUint = ^uint(0) +const maxInt = int(maxUint >> 1) +const minInt = -maxInt - 1 + +// RegisterFuzzyDecoders decode input from PHP with tolerance. +// It will handle string/number auto conversation, and treat empty [] as empty struct. +func RegisterFuzzyDecoders() { + jsoniter.RegisterExtension(&tolerateEmptyArrayExtension{}) + jsoniter.RegisterTypeDecoder("string", &fuzzyStringDecoder{}) + jsoniter.RegisterTypeDecoder("float32", &fuzzyFloat32Decoder{}) + jsoniter.RegisterTypeDecoder("float64", &fuzzyFloat64Decoder{}) + jsoniter.RegisterTypeDecoder("int", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(maxInt) || val < float64(minInt) { + iter.ReportError("fuzzy decode int", "exceed range") + return + } + *((*int)(ptr)) = int(val) + } else { + *((*int)(ptr)) = iter.ReadInt() + } + }}) + jsoniter.RegisterTypeDecoder("uint", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(maxUint) || val < 0 { + iter.ReportError("fuzzy decode uint", "exceed range") + return + } + *((*uint)(ptr)) = uint(val) + } else { + *((*uint)(ptr)) = iter.ReadUint() + } + }}) + jsoniter.RegisterTypeDecoder("int8", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt8) || val < float64(math.MinInt8) { + iter.ReportError("fuzzy decode int8", "exceed range") + return + } + *((*int8)(ptr)) = int8(val) + } else { + *((*int8)(ptr)) = iter.ReadInt8() + } + }}) + jsoniter.RegisterTypeDecoder("uint8", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint8) || val < 0 { + iter.ReportError("fuzzy decode uint8", "exceed range") + return + } + *((*uint8)(ptr)) = uint8(val) + } else { + *((*uint8)(ptr)) = iter.ReadUint8() + } + }}) + jsoniter.RegisterTypeDecoder("int16", &fuzzyIntegerDecoder{func(isFloat 
bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt16) || val < float64(math.MinInt16) { + iter.ReportError("fuzzy decode int16", "exceed range") + return + } + *((*int16)(ptr)) = int16(val) + } else { + *((*int16)(ptr)) = iter.ReadInt16() + } + }}) + jsoniter.RegisterTypeDecoder("uint16", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint16) || val < 0 { + iter.ReportError("fuzzy decode uint16", "exceed range") + return + } + *((*uint16)(ptr)) = uint16(val) + } else { + *((*uint16)(ptr)) = iter.ReadUint16() + } + }}) + jsoniter.RegisterTypeDecoder("int32", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt32) || val < float64(math.MinInt32) { + iter.ReportError("fuzzy decode int32", "exceed range") + return + } + *((*int32)(ptr)) = int32(val) + } else { + *((*int32)(ptr)) = iter.ReadInt32() + } + }}) + jsoniter.RegisterTypeDecoder("uint32", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint32) || val < 0 { + iter.ReportError("fuzzy decode uint32", "exceed range") + return + } + *((*uint32)(ptr)) = uint32(val) + } else { + *((*uint32)(ptr)) = iter.ReadUint32() + } + }}) + jsoniter.RegisterTypeDecoder("int64", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxInt64) || val < float64(math.MinInt64) { + iter.ReportError("fuzzy decode int64", "exceed range") + return + } + *((*int64)(ptr)) = int64(val) + } else { + *((*int64)(ptr)) = iter.ReadInt64() + } + }}) + jsoniter.RegisterTypeDecoder("uint64", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if isFloat { + val := iter.ReadFloat64() + if val > float64(math.MaxUint64) || val < 0 { + iter.ReportError("fuzzy decode uint64", "exceed range") + return + } + *((*uint64)(ptr)) = uint64(val) + } else { + *((*uint64)(ptr)) = iter.ReadUint64() + } + }}) +} + +type tolerateEmptyArrayExtension struct { + jsoniter.DummyExtension +} + +func (extension *tolerateEmptyArrayExtension) DecorateDecoder(typ reflect.Type, decoder jsoniter.ValDecoder) jsoniter.ValDecoder { + if typ.Kind() == reflect.Struct || typ.Kind() == reflect.Map { + return &tolerateEmptyArrayDecoder{decoder} + } + return decoder +} + +type tolerateEmptyArrayDecoder struct { + valDecoder jsoniter.ValDecoder +} + +func (decoder *tolerateEmptyArrayDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + if iter.WhatIsNext() == jsoniter.ArrayValue { + iter.Skip() + newIter := iter.Pool().BorrowIterator([]byte("{}")) + defer iter.Pool().ReturnIterator(newIter) + decoder.valDecoder.Decode(ptr, newIter) + } else { + decoder.valDecoder.Decode(ptr, iter) + } +} + +type fuzzyStringDecoder struct { +} + +func (decoder *fuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + switch valueType { + case jsoniter.NumberValue: + var number json.Number + iter.ReadVal(&number) + *((*string)(ptr)) = string(number) + case jsoniter.StringValue: + *((*string)(ptr)) = iter.ReadString() + default: + iter.ReportError("fuzzyStringDecoder", "not number or string") + } +} + +type fuzzyIntegerDecoder struct { + fun 
func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) +} + +func (decoder *fuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + var str string + switch valueType { + case jsoniter.NumberValue: + var number json.Number + iter.ReadVal(&number) + str = string(number) + case jsoniter.StringValue: + str = iter.ReadString() + case jsoniter.BoolValue: + if iter.ReadBool() { + str = "1" + } else { + str = "0" + } + default: + iter.ReportError("fuzzyIntegerDecoder", "not number or string") + } + newIter := iter.Pool().BorrowIterator([]byte(str)) + defer iter.Pool().ReturnIterator(newIter) + isFloat := strings.IndexByte(str, '.') != -1 + decoder.fun(isFloat, ptr, newIter) + if newIter.Error != nil && newIter.Error != io.EOF { + iter.Error = newIter.Error + } +} + +type fuzzyFloat32Decoder struct { +} + +func (decoder *fuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + var str string + switch valueType { + case jsoniter.NumberValue: + *((*float32)(ptr)) = iter.ReadFloat32() + case jsoniter.StringValue: + str = iter.ReadString() + newIter := iter.Pool().BorrowIterator([]byte(str)) + defer iter.Pool().ReturnIterator(newIter) + *((*float32)(ptr)) = newIter.ReadFloat32() + if newIter.Error != nil && newIter.Error != io.EOF { + iter.Error = newIter.Error + } + case jsoniter.BoolValue: + // support bool to float32 + if iter.ReadBool() { + *((*float32)(ptr)) = 1 + } else { + *((*float32)(ptr)) = 0 + } + default: + iter.ReportError("fuzzyFloat32Decoder", "not number or string") + } +} + +type fuzzyFloat64Decoder struct { +} + +func (decoder *fuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + valueType := iter.WhatIsNext() + var str string + switch valueType { + case jsoniter.NumberValue: + *((*float64)(ptr)) = iter.ReadFloat64() + case jsoniter.StringValue: + str = iter.ReadString() + newIter := iter.Pool().BorrowIterator([]byte(str)) + defer iter.Pool().ReturnIterator(newIter) + *((*float64)(ptr)) = newIter.ReadFloat64() + if newIter.Error != nil && newIter.Error != io.EOF { + iter.Error = newIter.Error + } + case jsoniter.BoolValue: + // support bool to float64 + if iter.ReadBool() { + *((*float64)(ptr)) = 1 + } else { + *((*float64)(ptr)) = 0 + } + default: + iter.ReportError("fuzzyFloat32Decoder", "not number or string") + } +} diff --git a/vendor/github.com/json-iterator/go/extra/fuzzy_decoder_test.go b/vendor/github.com/json-iterator/go/extra/fuzzy_decoder_test.go new file mode 100644 index 000000000..b6a15596d --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/fuzzy_decoder_test.go @@ -0,0 +1,359 @@ +package extra + +import ( + "testing" + + "github.com/json-iterator/go" + "github.com/stretchr/testify/require" +) + +func init() { + RegisterFuzzyDecoders() +} + +func Test_any_to_string(t *testing.T) { + should := require.New(t) + var val string + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal("100", val) + should.Nil(jsoniter.UnmarshalFromString("10", &val)) + should.Equal("10", val) + should.Nil(jsoniter.UnmarshalFromString("10.1", &val)) + should.Equal("10.1", val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal("10.1", val) + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) +} +func Test_any_to_int64(t *testing.T) { + should := require.New(t) + var val int64 + + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + 
should.Equal(int64(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(int64(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(int64(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(int64(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(int64(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(int64(1), val) + + should.Nil(jsoniter.UnmarshalFromString(`-10`, &val)) + should.Equal(int64(-10), val) + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_int(t *testing.T) { + should := require.New(t) + var val int + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(100, val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(10, val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(10, val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(10, val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(0, val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(1, val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_int16(t *testing.T) { + should := require.New(t) + var val int16 + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(int16(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(int16(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(int16(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(int16(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(int16(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(int16(1), val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_int32(t *testing.T) { + should := require.New(t) + var val int32 + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(int32(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(int32(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(int32(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(int32(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(int32(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(int32(1), val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_int8(t *testing.T) { + should := require.New(t) + var val int8 + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(int8(100), val) + 
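The fuzzy decoders registered above make the default jsoniter configuration tolerant of JSON that mixes numbers, quoted numbers and booleans for numeric Go fields, which is what these tests exercise. A minimal usage sketch under the same assumptions as Test_bad_case; the OrderEvent struct and its field names are illustrative only, not part of the vendored code:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

// OrderEvent is a hypothetical payload whose numeric fields sometimes arrive
// as strings (illustrative assumption).
type OrderEvent struct {
	ExtraType uint64  `json:"extra_type"`
	Price     float64 `json:"price"`
}

func main() {
	// Installs the tolerant string/number/bool decoders defined in
	// extra/fuzzy_decoder.go for the global jsoniter configuration.
	extra.RegisterFuzzyDecoders()

	var evt OrderEvent
	// Both values are quoted strings in the payload, but decode into uint64/float64.
	err := jsoniter.UnmarshalFromString(`{"extra_type":"181760","price":"12.5"}`, &evt)
	fmt.Println(evt, err) // {181760 12.5} <nil>
}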
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(int8(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(int8(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(int8(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(int8(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(int8(1), val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_uint8(t *testing.T) { + should := require.New(t) + var val uint8 + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(uint8(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(uint8(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(uint8(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(uint8(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(uint8(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(uint8(1), val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_uint64(t *testing.T) { + should := require.New(t) + var val uint64 + + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(uint64(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(uint64(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(uint64(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(uint64(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(uint64(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(uint64(1), val) + + // TODO fix? + should.NotNil(jsoniter.UnmarshalFromString(`-10`, &val)) + should.Equal(uint64(0), val) + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} +func Test_any_to_uint32(t *testing.T) { + should := require.New(t) + var val uint32 + + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(uint32(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(uint32(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(uint32(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(uint32(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(uint32(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(uint32(1), val) + + // TODO fix? 
+ should.NotNil(jsoniter.UnmarshalFromString(`-10`, &val)) + should.Equal(uint32(0), val) + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} +func Test_any_to_uint16(t *testing.T) { + should := require.New(t) + var val uint16 + + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(uint16(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(uint16(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(uint16(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(uint16(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(uint16(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(uint16(1), val) + + // TODO fix? + should.NotNil(jsoniter.UnmarshalFromString(`-10`, &val)) + should.Equal(uint16(0), val) + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} +func Test_any_to_uint(t *testing.T) { + should := require.New(t) + var val uint + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(uint(100), val) + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(uint(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(uint(10), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(uint(10), val) + + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(uint(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(uint(1), val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) + // large float to int + should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val)) +} + +func Test_any_to_float32(t *testing.T) { + should := require.New(t) + var val float32 + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(float32(100), val) + + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(float32(10.1), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(float32(10.1), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(float32(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(float32(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(float32(1), val) + + should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) +} + +func Test_any_to_float64(t *testing.T) { + should := require.New(t) + var val float64 + + should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val)) + should.Equal(float64(100), val) + + should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val)) + should.Equal(float64(10.1), val) + should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val)) + should.Equal(float64(10.1), val) + should.Nil(jsoniter.UnmarshalFromString(`10`, &val)) + should.Equal(float64(10), val) + + // bool part + should.Nil(jsoniter.UnmarshalFromString(`false`, &val)) + should.Equal(float64(0), val) + should.Nil(jsoniter.UnmarshalFromString(`true`, &val)) + should.Equal(float64(1), val) + + 
should.NotNil(jsoniter.UnmarshalFromString("{}", &val)) + should.NotNil(jsoniter.UnmarshalFromString("[]", &val)) +} + +func Test_empty_array_as_map(t *testing.T) { + should := require.New(t) + var val map[string]interface{} + should.Nil(jsoniter.UnmarshalFromString(`[]`, &val)) + should.Equal(map[string]interface{}{}, val) +} + +func Test_empty_array_as_object(t *testing.T) { + should := require.New(t) + var val struct{} + should.Nil(jsoniter.UnmarshalFromString(`[]`, &val)) + should.Equal(struct{}{}, val) +} + +func Test_bad_case(t *testing.T) { + var jsonstr = ` +{ + "extra_type": 181760, + "combo_type": 0, + "trigger_time_ms": 1498800398000, + "_create_time": "2017-06-16 11:21:39", + "_msg_type": 41000 +} +` + + type OrderEventRequestParams struct { + ExtraType uint64 `json:"extra_type"` + } + + var a OrderEventRequestParams + err := jsoniter.UnmarshalFromString(jsonstr, &a) + should := require.New(t) + should.Nil(err) +} diff --git a/vendor/github.com/json-iterator/go/extra/naming_strategy.go b/vendor/github.com/json-iterator/go/extra/naming_strategy.go new file mode 100644 index 000000000..642556fed --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/naming_strategy.go @@ -0,0 +1,41 @@ +package extra + +import ( + "github.com/json-iterator/go" + "unicode" +) + +// SetNamingStrategy rename struct fields uniformly +func SetNamingStrategy(translate func(string) string) { + jsoniter.RegisterExtension(&namingStrategyExtension{jsoniter.DummyExtension{}, translate}) +} + +type namingStrategyExtension struct { + jsoniter.DummyExtension + translate func(string) string +} + +func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) { + for _, binding := range structDescriptor.Fields { + binding.ToNames = []string{extension.translate(binding.Field.Name)} + binding.FromNames = []string{extension.translate(binding.Field.Name)} + } +} + +// LowerCaseWithUnderscores one strategy to SetNamingStrategy for. It will change HelloWorld to hello_world. 
+func LowerCaseWithUnderscores(name string) string { + newName := []rune{} + for i, c := range name { + if i == 0 { + newName = append(newName, unicode.ToLower(c)) + } else { + if unicode.IsUpper(c) { + newName = append(newName, '_') + newName = append(newName, unicode.ToLower(c)) + } else { + newName = append(newName, c) + } + } + } + return string(newName) +} diff --git a/vendor/github.com/json-iterator/go/extra/naming_strategy_test.go b/vendor/github.com/json-iterator/go/extra/naming_strategy_test.go new file mode 100644 index 000000000..d15608126 --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/naming_strategy_test.go @@ -0,0 +1,23 @@ +package extra + +import ( + "github.com/json-iterator/go" + "github.com/stretchr/testify/require" + "testing" +) + +func Test_lower_case_with_underscores(t *testing.T) { + should := require.New(t) + should.Equal("hello_world", LowerCaseWithUnderscores("helloWorld")) + should.Equal("hello_world", LowerCaseWithUnderscores("HelloWorld")) + SetNamingStrategy(LowerCaseWithUnderscores) + output, err := jsoniter.Marshal(struct { + UserName string + FirstLanguage string + }{ + UserName: "taowen", + FirstLanguage: "Chinese", + }) + should.Nil(err) + should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output)) +} diff --git a/vendor/github.com/json-iterator/go/extra/privat_fields.go b/vendor/github.com/json-iterator/go/extra/privat_fields.go new file mode 100644 index 000000000..18643961f --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/privat_fields.go @@ -0,0 +1,25 @@ +package extra + +import ( + "github.com/json-iterator/go" + "unicode" +) + +// SupportPrivateFields include private fields when encoding/decoding +func SupportPrivateFields() { + jsoniter.RegisterExtension(&privateFieldsExtension{}) +} + +type privateFieldsExtension struct { + jsoniter.DummyExtension +} + +func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) { + for _, binding := range structDescriptor.Fields { + isPrivate := unicode.IsLower(rune(binding.Field.Name[0])) + if isPrivate { + binding.FromNames = []string{binding.Field.Name} + binding.ToNames = []string{binding.Field.Name} + } + } +} diff --git a/vendor/github.com/json-iterator/go/extra/private_fields_test.go b/vendor/github.com/json-iterator/go/extra/private_fields_test.go new file mode 100644 index 000000000..d3bc40dac --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/private_fields_test.go @@ -0,0 +1,18 @@ +package extra + +import ( + "github.com/json-iterator/go" + "github.com/stretchr/testify/require" + "testing" +) + +func Test_private_fields(t *testing.T) { + type TestObject struct { + field1 string + } + SupportPrivateFields() + should := require.New(t) + obj := TestObject{} + should.Nil(jsoniter.UnmarshalFromString(`{"field1":"Hello"}`, &obj)) + should.Equal("Hello", obj.field1) +} diff --git a/vendor/github.com/json-iterator/go/extra/time_as_int64_codec.go b/vendor/github.com/json-iterator/go/extra/time_as_int64_codec.go new file mode 100644 index 000000000..e270cd403 --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/time_as_int64_codec.go @@ -0,0 +1,34 @@ +package extra + +import ( + "github.com/json-iterator/go" + "time" + "unsafe" +) + +// RegisterTimeAsInt64Codec encode/decode time since number of unit since epoch. the precision is the unit. 
+func RegisterTimeAsInt64Codec(precision time.Duration) { + jsoniter.RegisterTypeEncoder("time.Time", &timeAsInt64Codec{precision}) + jsoniter.RegisterTypeDecoder("time.Time", &timeAsInt64Codec{precision}) +} + +type timeAsInt64Codec struct { + precision time.Duration +} + +func (codec *timeAsInt64Codec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) { + nanoseconds := iter.ReadInt64() * codec.precision.Nanoseconds() + *((*time.Time)(ptr)) = time.Unix(0, nanoseconds) +} + +func (codec *timeAsInt64Codec) IsEmpty(ptr unsafe.Pointer) bool { + ts := *((*time.Time)(ptr)) + return ts.UnixNano() == 0 +} +func (codec *timeAsInt64Codec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) { + ts := *((*time.Time)(ptr)) + stream.WriteInt64(ts.UnixNano() / codec.precision.Nanoseconds()) +} +func (codec *timeAsInt64Codec) EncodeInterface(val interface{}, stream *jsoniter.Stream) { + jsoniter.WriteToStream(val, stream, codec) +} diff --git a/vendor/github.com/json-iterator/go/extra/time_as_int64_codec_test.go b/vendor/github.com/json-iterator/go/extra/time_as_int64_codec_test.go new file mode 100644 index 000000000..b27220f18 --- /dev/null +++ b/vendor/github.com/json-iterator/go/extra/time_as_int64_codec_test.go @@ -0,0 +1,31 @@ +package extra + +import ( + "github.com/json-iterator/go" + "github.com/stretchr/testify/require" + "testing" + "time" +) + +func Test_time_as_int64(t *testing.T) { + should := require.New(t) + RegisterTimeAsInt64Codec(time.Nanosecond) + output, err := jsoniter.Marshal(time.Unix(1497952257, 1002)) + should.Nil(err) + should.Equal("1497952257000001002", string(output)) + var val time.Time + should.Nil(jsoniter.Unmarshal(output, &val)) + should.Equal(int64(1497952257000001002), val.UnixNano()) +} + +func Test_time_as_int64_keep_microsecond(t *testing.T) { + t.Skip("conflict") + should := require.New(t) + RegisterTimeAsInt64Codec(time.Microsecond) + output, err := jsoniter.Marshal(time.Unix(1, 1002)) + should.Nil(err) + should.Equal("1000001", string(output)) + var val time.Time + should.Nil(jsoniter.Unmarshal(output, &val)) + should.Equal(int64(1000001000), val.UnixNano()) +} diff --git a/vendor/github.com/json-iterator/go/feature_adapter.go b/vendor/github.com/json-iterator/go/feature_adapter.go new file mode 100644 index 000000000..0214b711a --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_adapter.go @@ -0,0 +1,133 @@ +package jsoniter + +import ( + "bytes" + "io" +) + +// RawMessage to make replace json with jsoniter +type RawMessage []byte + +// Unmarshal adapts to json/encoding Unmarshal API +// +// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v. +// Refer to https://godoc.org/encoding/json#Unmarshal for more information +func Unmarshal(data []byte, v interface{}) error { + return ConfigDefault.Unmarshal(data, v) +} + +func lastNotSpacePos(data []byte) int { + for i := len(data) - 1; i >= 0; i-- { + if data[i] != ' ' && data[i] != '\t' && data[i] != '\r' && data[i] != '\n' { + return i + 1 + } + } + return 0 +} + +// UnmarshalFromString convenient method to read from string instead of []byte +func UnmarshalFromString(str string, v interface{}) error { + return ConfigDefault.UnmarshalFromString(str, v) +} + +// Get quick method to get value from deeply nested JSON structure +func Get(data []byte, path ...interface{}) Any { + return ConfigDefault.Get(data, path...) 
+} + +// Marshal adapts to json/encoding Marshal API +// +// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API +// Refer to https://godoc.org/encoding/json#Marshal for more information +func Marshal(v interface{}) ([]byte, error) { + return ConfigDefault.Marshal(v) +} + +// MarshalIndent same as json.MarshalIndent. Prefix is not supported. +func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + return ConfigDefault.MarshalIndent(v, prefix, indent) +} + +// MarshalToString convenient method to write as string instead of []byte +func MarshalToString(v interface{}) (string, error) { + return ConfigDefault.MarshalToString(v) +} + +// NewDecoder adapts to json/stream NewDecoder API. +// +// NewDecoder returns a new decoder that reads from r. +// +// Instead of a json/encoding Decoder, an Decoder is returned +// Refer to https://godoc.org/encoding/json#NewDecoder for more information +func NewDecoder(reader io.Reader) *Decoder { + return ConfigDefault.NewDecoder(reader) +} + +// Decoder reads and decodes JSON values from an input stream. +// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress) +type Decoder struct { + iter *Iterator +} + +// Decode decode JSON into interface{} +func (adapter *Decoder) Decode(obj interface{}) error { + adapter.iter.ReadVal(obj) + err := adapter.iter.Error + if err == io.EOF { + return nil + } + return adapter.iter.Error +} + +// More is there more? +func (adapter *Decoder) More() bool { + return adapter.iter.head != adapter.iter.tail +} + +// Buffered remaining buffer +func (adapter *Decoder) Buffered() io.Reader { + remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail] + return bytes.NewReader(remaining) +} + +// UseNumber for number JSON element, use float64 or json.NumberValue (alias of string) +func (adapter *Decoder) UseNumber() { + origCfg := adapter.iter.cfg.configBeforeFrozen + origCfg.UseNumber = true + adapter.iter.cfg = origCfg.Froze().(*frozenConfig) +} + +// NewEncoder same as json.NewEncoder +func NewEncoder(writer io.Writer) *Encoder { + return ConfigDefault.NewEncoder(writer) +} + +// Encoder same as json.Encoder +type Encoder struct { + stream *Stream +} + +// Encode encode interface{} as JSON to io.Writer +func (adapter *Encoder) Encode(val interface{}) error { + adapter.stream.WriteVal(val) + adapter.stream.WriteRaw("\n") + adapter.stream.Flush() + return adapter.stream.Error +} + +// SetIndent set the indention. Prefix is not supported +func (adapter *Encoder) SetIndent(prefix, indent string) { + adapter.stream.cfg.indentionStep = len(indent) +} + +// SetEscapeHTML escape html by default, set to false to disable +func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) { + config := adapter.stream.cfg.configBeforeFrozen + config.EscapeHTML = escapeHTML + adapter.stream.cfg = config.Froze().(*frozenConfig) +} + +// Valid reports whether data is a valid JSON encoding. +func Valid(data []byte) bool { + return ConfigDefault.Valid(data) +} diff --git a/vendor/github.com/json-iterator/go/feature_any.go b/vendor/github.com/json-iterator/go/feature_any.go new file mode 100644 index 000000000..87716d1fc --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any.go @@ -0,0 +1,245 @@ +package jsoniter + +import ( + "errors" + "fmt" + "io" + "reflect" +) + +// Any generic object representation. +// The lazy json implementation holds []byte and parse lazily. 
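feature_adapter.go above wires package-level helpers to ConfigDefault so the package can stand in for encoding/json. A hedged sketch of the drop-in usage; the Event type is an illustrative assumption, everything else uses only functions defined in the adapter above (Marshal, Unmarshal, NewEncoder, NewDecoder, Valid):

package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Event is an illustrative type, not part of the vendored code.
type Event struct {
	Name string `json:"name"`
	Seq  int    `json:"seq"`
}

func main() {
	// Package-level helpers mirror encoding/json's Marshal/Unmarshal signatures.
	data, _ := jsoniter.Marshal(Event{Name: "created", Seq: 1})

	var evt Event
	_ = jsoniter.Unmarshal(data, &evt)

	// Stream-style Encoder/Decoder, as with json.NewEncoder / json.NewDecoder.
	var buf bytes.Buffer
	enc := jsoniter.NewEncoder(&buf)
	_ = enc.Encode(evt)

	dec := jsoniter.NewDecoder(&buf)
	var again Event
	_ = dec.Decode(&again)

	fmt.Println(evt, again, jsoniter.Valid(data))
}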
+type Any interface { + LastError() error + ValueType() ValueType + MustBeValid() Any + ToBool() bool + ToInt() int + ToInt32() int32 + ToInt64() int64 + ToUint() uint + ToUint32() uint32 + ToUint64() uint64 + ToFloat32() float32 + ToFloat64() float64 + ToString() string + ToVal(val interface{}) + Get(path ...interface{}) Any + // TODO: add Set + Size() int + Keys() []string + GetInterface() interface{} + WriteTo(stream *Stream) +} + +type baseAny struct{} + +func (any *baseAny) Get(path ...interface{}) Any { + return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)} +} + +func (any *baseAny) Size() int { + return 0 +} + +func (any *baseAny) Keys() []string { + return []string{} +} + +func (any *baseAny) ToVal(obj interface{}) { + panic("not implemented") +} + +// WrapInt32 turn int32 into Any interface +func WrapInt32(val int32) Any { + return &int32Any{baseAny{}, val} +} + +// WrapInt64 turn int64 into Any interface +func WrapInt64(val int64) Any { + return &int64Any{baseAny{}, val} +} + +// WrapUint32 turn uint32 into Any interface +func WrapUint32(val uint32) Any { + return &uint32Any{baseAny{}, val} +} + +// WrapUint64 turn uint64 into Any interface +func WrapUint64(val uint64) Any { + return &uint64Any{baseAny{}, val} +} + +// WrapFloat64 turn float64 into Any interface +func WrapFloat64(val float64) Any { + return &floatAny{baseAny{}, val} +} + +// WrapString turn string into Any interface +func WrapString(val string) Any { + return &stringAny{baseAny{}, val} +} + +// Wrap turn a go object into Any interface +func Wrap(val interface{}) Any { + if val == nil { + return &nilAny{} + } + asAny, isAny := val.(Any) + if isAny { + return asAny + } + typ := reflect.TypeOf(val) + switch typ.Kind() { + case reflect.Slice: + return wrapArray(val) + case reflect.Struct: + return wrapStruct(val) + case reflect.Map: + return wrapMap(val) + case reflect.String: + return WrapString(val.(string)) + case reflect.Int: + return WrapInt64(int64(val.(int))) + case reflect.Int8: + return WrapInt32(int32(val.(int8))) + case reflect.Int16: + return WrapInt32(int32(val.(int16))) + case reflect.Int32: + return WrapInt32(val.(int32)) + case reflect.Int64: + return WrapInt64(val.(int64)) + case reflect.Uint: + return WrapUint64(uint64(val.(uint))) + case reflect.Uint8: + return WrapUint32(uint32(val.(uint8))) + case reflect.Uint16: + return WrapUint32(uint32(val.(uint16))) + case reflect.Uint32: + return WrapUint32(uint32(val.(uint32))) + case reflect.Uint64: + return WrapUint64(val.(uint64)) + case reflect.Float32: + return WrapFloat64(float64(val.(float32))) + case reflect.Float64: + return WrapFloat64(val.(float64)) + case reflect.Bool: + if val.(bool) == true { + return &trueAny{} + } + return &falseAny{} + } + return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)} +} + +// ReadAny read next JSON element as an Any object. It is a better json.RawMessage. 
+func (iter *Iterator) ReadAny() Any { + return iter.readAny() +} + +func (iter *Iterator) readAny() Any { + c := iter.nextToken() + switch c { + case '"': + iter.unreadByte() + return &stringAny{baseAny{}, iter.ReadString()} + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + return &nilAny{} + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + return &trueAny{} + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + return &falseAny{} + case '{': + return iter.readObjectAny() + case '[': + return iter.readArrayAny() + case '-': + return iter.readNumberAny(false) + case 0: + return &invalidAny{baseAny{}, errors.New("input is empty")} + default: + return iter.readNumberAny(true) + } +} + +func (iter *Iterator) readNumberAny(positive bool) Any { + iter.startCapture(iter.head - 1) + iter.skipNumber() + lazyBuf := iter.stopCapture() + return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readObjectAny() Any { + iter.startCapture(iter.head - 1) + iter.skipObject() + lazyBuf := iter.stopCapture() + return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readArrayAny() Any { + iter.startCapture(iter.head - 1) + iter.skipArray() + lazyBuf := iter.stopCapture() + return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func locateObjectField(iter *Iterator, target string) []byte { + var found []byte + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + if field == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + return true + }) + return found +} + +func locateArrayElement(iter *Iterator, target int) []byte { + var found []byte + n := 0 + iter.ReadArrayCB(func(iter *Iterator) bool { + if n == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + n++ + return true + }) + return found +} + +func locatePath(iter *Iterator, path []interface{}) Any { + for i, pathKeyObj := range path { + switch pathKey := pathKeyObj.(type) { + case string: + valueBytes := locateObjectField(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int: + valueBytes := locateArrayElement(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int32: + if '*' == pathKey { + return iter.readAny().Get(path[i:]...) 
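The Any interface together with locateObjectField/locateArrayElement/locatePath backs the package-level Get helper, which walks raw JSON lazily instead of unmarshalling the whole document. A sketch of that path-based access; the sample payload is made up, and the '*' wildcard (an int32 rune handled by the lazy Get implementations that follow) fans out over every element:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users":[{"name":"alice","age":30},{"name":"bob","age":25}]}`)

	// String path elements select object fields, int elements select array indexes.
	name := jsoniter.Get(data, "users", 0, "name").ToString() // "alice"

	// A missing path yields an invalid Any rather than an error return value;
	// ValueType/LastError/MustBeValid surface the failure.
	missing := jsoniter.Get(data, "users", 5, "name")
	fmt.Println(name, missing.ValueType() == jsoniter.InvalidValue, missing.LastError())

	// The '*' wildcard collects the matches from every array element into an array Any.
	ages := jsoniter.Get(data, "users", '*', "age")
	fmt.Println(ages.Size()) // 2
}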
+ } + return newInvalidAny(path[i:]) + default: + return newInvalidAny(path[i:]) + } + } + if iter.Error != nil && iter.Error != io.EOF { + return &invalidAny{baseAny{}, iter.Error} + } + return iter.readAny() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_array.go b/vendor/github.com/json-iterator/go/feature_any_array.go new file mode 100644 index 000000000..0449e9aa4 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_array.go @@ -0,0 +1,278 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type arrayLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *arrayLazyAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayLazyAny) MustBeValid() Any { + return any +} + +func (any *arrayLazyAny) LastError() error { + return any.err +} + +func (any *arrayLazyAny) ToBool() bool { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.ReadArray() +} + +func (any *arrayLazyAny) ToInt() int { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt32() int32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt64() int64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint() uint { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint32() uint32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint64() uint64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat32() float32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat64() float64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *arrayLazyAny) ToVal(val interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(val) +} + +func (any *arrayLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateArrayElement(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + arr := make([]Any, 0) + iter.ReadArrayCB(func(iter *Iterator) bool { + found := iter.readAny().Get(path[1:]...) 
+ if found.ValueType() != InvalidValue { + arr = append(arr, found) + } + return true + }) + return wrapArray(arr) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadArrayCB(func(iter *Iterator) bool { + size++ + iter.Skip() + return true + }) + return size +} + +func (any *arrayLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *arrayLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type arrayAny struct { + baseAny + val reflect.Value +} + +func wrapArray(val interface{}) *arrayAny { + return &arrayAny{baseAny{}, reflect.ValueOf(val)} +} + +func (any *arrayAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayAny) MustBeValid() Any { + return any +} + +func (any *arrayAny) LastError() error { + return nil +} + +func (any *arrayAny) ToBool() bool { + return any.val.Len() != 0 +} + +func (any *arrayAny) ToInt() int { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt32() int32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt64() int64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint() uint { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint32() uint32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint64() uint64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat32() float32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat64() float64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToString() string { + str, _ := MarshalToString(any.val.Interface()) + return str +} + +func (any *arrayAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + if firstPath < 0 || firstPath >= any.val.Len() { + return newInvalidAny(path) + } + return Wrap(any.val.Index(firstPath).Interface()) + case int32: + if '*' == firstPath { + mappedAll := make([]Any, 0) + for i := 0; i < any.val.Len(); i++ { + mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll = append(mappedAll, mapped) + } + } + return wrapArray(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayAny) Size() int { + return any.val.Len() +} + +func (any *arrayAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *arrayAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_bool.go b/vendor/github.com/json-iterator/go/feature_any_bool.go new file mode 100644 index 000000000..9452324af --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_bool.go @@ -0,0 +1,137 @@ +package jsoniter + +type trueAny struct { + baseAny +} + +func (any *trueAny) LastError() error { + return nil +} + +func (any *trueAny) ToBool() bool { + return true +} + +func (any *trueAny) ToInt() int { + return 1 +} + +func (any *trueAny) ToInt32() int32 { + return 1 +} + +func (any *trueAny) ToInt64() int64 { + return 1 +} + +func (any *trueAny) ToUint() uint { + return 1 +} + +func (any *trueAny) ToUint32() uint32 { + return 1 +} + +func (any *trueAny) ToUint64() uint64 { + return 1 +} + +func (any *trueAny) ToFloat32() float32 { + return 1 +} + +func (any *trueAny) ToFloat64() float64 { + return 1 +} + +func (any *trueAny) ToString() string { + return "true" +} + +func (any *trueAny) WriteTo(stream *Stream) { + stream.WriteTrue() +} + +func (any *trueAny) Parse() *Iterator { + return nil +} + +func (any *trueAny) GetInterface() interface{} { + return true +} + +func (any *trueAny) ValueType() ValueType { + return BoolValue +} + +func (any *trueAny) MustBeValid() Any { + return any +} + +type falseAny struct { + baseAny +} + +func (any *falseAny) LastError() error { + return nil +} + +func (any *falseAny) ToBool() bool { + return false +} + +func (any *falseAny) ToInt() int { + return 0 +} + +func (any *falseAny) ToInt32() int32 { + return 0 +} + +func (any *falseAny) ToInt64() int64 { + return 0 +} + +func (any *falseAny) ToUint() uint { + return 0 +} + +func (any *falseAny) ToUint32() uint32 { + return 0 +} + +func (any *falseAny) ToUint64() uint64 { + return 0 +} + +func (any *falseAny) ToFloat32() float32 { + return 0 +} + +func (any *falseAny) ToFloat64() float64 { + return 0 +} + +func (any *falseAny) ToString() string { + return "false" +} + +func (any *falseAny) WriteTo(stream *Stream) { + stream.WriteFalse() +} + +func (any *falseAny) Parse() *Iterator { + return nil +} + +func (any *falseAny) GetInterface() interface{} { + return false +} + +func (any *falseAny) ValueType() ValueType { + return BoolValue +} + +func (any *falseAny) MustBeValid() Any { + return any +} diff --git a/vendor/github.com/json-iterator/go/feature_any_float.go b/vendor/github.com/json-iterator/go/feature_any_float.go new file mode 100644 index 000000000..35fdb0949 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_float.go @@ -0,0 +1,83 @@ +package jsoniter + +import ( + "strconv" +) + +type floatAny struct { + baseAny + val float64 +} + +func (any *floatAny) Parse() *Iterator { + return nil +} + +func (any *floatAny) ValueType() ValueType { + return NumberValue +} + +func (any *floatAny) MustBeValid() Any { + return any +} + +func (any *floatAny) LastError() error { + return nil +} + +func (any *floatAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *floatAny) ToInt() int { + return int(any.val) +} + +func (any *floatAny) ToInt32() int32 { + return int32(any.val) +} + +func (any 
*floatAny) ToInt64() int64 { + return int64(any.val) +} + +func (any *floatAny) ToUint() uint { + if any.val > 0 { + return uint(any.val) + } + return 0 +} + +func (any *floatAny) ToUint32() uint32 { + if any.val > 0 { + return uint32(any.val) + } + return 0 +} + +func (any *floatAny) ToUint64() uint64 { + if any.val > 0 { + return uint64(any.val) + } + return 0 +} + +func (any *floatAny) ToFloat32() float32 { + return float32(any.val) +} + +func (any *floatAny) ToFloat64() float64 { + return any.val +} + +func (any *floatAny) ToString() string { + return strconv.FormatFloat(any.val, 'E', -1, 64) +} + +func (any *floatAny) WriteTo(stream *Stream) { + stream.WriteFloat64(any.val) +} + +func (any *floatAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_int32.go b/vendor/github.com/json-iterator/go/feature_any_int32.go new file mode 100644 index 000000000..1b56f3991 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_int32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int32Any struct { + baseAny + val int32 +} + +func (any *int32Any) LastError() error { + return nil +} + +func (any *int32Any) ValueType() ValueType { + return NumberValue +} + +func (any *int32Any) MustBeValid() Any { + return any +} + +func (any *int32Any) ToBool() bool { + return any.val != 0 +} + +func (any *int32Any) ToInt() int { + return int(any.val) +} + +func (any *int32Any) ToInt32() int32 { + return any.val +} + +func (any *int32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *int32Any) ToUint() uint { + return uint(any.val) +} + +func (any *int32Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *int32Any) WriteTo(stream *Stream) { + stream.WriteInt32(any.val) +} + +func (any *int32Any) Parse() *Iterator { + return nil +} + +func (any *int32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_int64.go b/vendor/github.com/json-iterator/go/feature_any_int64.go new file mode 100644 index 000000000..c440d72b6 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_int64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int64Any struct { + baseAny + val int64 +} + +func (any *int64Any) LastError() error { + return nil +} + +func (any *int64Any) ValueType() ValueType { + return NumberValue +} + +func (any *int64Any) MustBeValid() Any { + return any +} + +func (any *int64Any) ToBool() bool { + return any.val != 0 +} + +func (any *int64Any) ToInt() int { + return int(any.val) +} + +func (any *int64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *int64Any) ToInt64() int64 { + return any.val +} + +func (any *int64Any) ToUint() uint { + return uint(any.val) +} + +func (any *int64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int64Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int64Any) ToString() string { + return strconv.FormatInt(any.val, 10) +} + +func (any *int64Any) WriteTo(stream 
*Stream) { + stream.WriteInt64(any.val) +} + +func (any *int64Any) Parse() *Iterator { + return nil +} + +func (any *int64Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_invalid.go b/vendor/github.com/json-iterator/go/feature_any_invalid.go new file mode 100644 index 000000000..1d859eac3 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_invalid.go @@ -0,0 +1,82 @@ +package jsoniter + +import "fmt" + +type invalidAny struct { + baseAny + err error +} + +func newInvalidAny(path []interface{}) *invalidAny { + return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)} +} + +func (any *invalidAny) LastError() error { + return any.err +} + +func (any *invalidAny) ValueType() ValueType { + return InvalidValue +} + +func (any *invalidAny) MustBeValid() Any { + panic(any.err) +} + +func (any *invalidAny) ToBool() bool { + return false +} + +func (any *invalidAny) ToInt() int { + return 0 +} + +func (any *invalidAny) ToInt32() int32 { + return 0 +} + +func (any *invalidAny) ToInt64() int64 { + return 0 +} + +func (any *invalidAny) ToUint() uint { + return 0 +} + +func (any *invalidAny) ToUint32() uint32 { + return 0 +} + +func (any *invalidAny) ToUint64() uint64 { + return 0 +} + +func (any *invalidAny) ToFloat32() float32 { + return 0 +} + +func (any *invalidAny) ToFloat64() float64 { + return 0 +} + +func (any *invalidAny) ToString() string { + return "" +} + +func (any *invalidAny) WriteTo(stream *Stream) { +} + +func (any *invalidAny) Get(path ...interface{}) Any { + if any.err == nil { + return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)} + } + return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)} +} + +func (any *invalidAny) Parse() *Iterator { + return nil +} + +func (any *invalidAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/feature_any_nil.go b/vendor/github.com/json-iterator/go/feature_any_nil.go new file mode 100644 index 000000000..d04cb54c1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_nil.go @@ -0,0 +1,69 @@ +package jsoniter + +type nilAny struct { + baseAny +} + +func (any *nilAny) LastError() error { + return nil +} + +func (any *nilAny) ValueType() ValueType { + return NilValue +} + +func (any *nilAny) MustBeValid() Any { + return any +} + +func (any *nilAny) ToBool() bool { + return false +} + +func (any *nilAny) ToInt() int { + return 0 +} + +func (any *nilAny) ToInt32() int32 { + return 0 +} + +func (any *nilAny) ToInt64() int64 { + return 0 +} + +func (any *nilAny) ToUint() uint { + return 0 +} + +func (any *nilAny) ToUint32() uint32 { + return 0 +} + +func (any *nilAny) ToUint64() uint64 { + return 0 +} + +func (any *nilAny) ToFloat32() float32 { + return 0 +} + +func (any *nilAny) ToFloat64() float64 { + return 0 +} + +func (any *nilAny) ToString() string { + return "" +} + +func (any *nilAny) WriteTo(stream *Stream) { + stream.WriteNil() +} + +func (any *nilAny) Parse() *Iterator { + return nil +} + +func (any *nilAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/feature_any_number.go b/vendor/github.com/json-iterator/go/feature_any_number.go new file mode 100644 index 000000000..4e1c27641 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_number.go @@ -0,0 +1,104 @@ +package jsoniter + +import "unsafe" + +type numberLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any 
*numberLazyAny) ValueType() ValueType { + return NumberValue +} + +func (any *numberLazyAny) MustBeValid() Any { + return any +} + +func (any *numberLazyAny) LastError() error { + return any.err +} + +func (any *numberLazyAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *numberLazyAny) ToInt() int { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToInt32() int32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt32() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToInt64() int64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt64() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToUint() uint { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToUint32() uint32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint32() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToUint64() uint64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint64() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToFloat32() float32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat32() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToFloat64() float64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat64() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *numberLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *numberLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_object.go b/vendor/github.com/json-iterator/go/feature_any_object.go new file mode 100644 index 000000000..c44ef5c98 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_object.go @@ -0,0 +1,374 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type objectLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *objectLazyAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectLazyAny) MustBeValid() Any { + return any +} + +func (any *objectLazyAny) LastError() error { + return any.err +} + +func (any *objectLazyAny) ToBool() bool { + return true +} + +func (any *objectLazyAny) ToInt() int { + return 0 +} + +func (any *objectLazyAny) ToInt32() int32 { + return 0 +} + +func (any *objectLazyAny) ToInt64() int64 { + return 0 +} + +func (any *objectLazyAny) ToUint() uint { + return 0 +} + +func (any *objectLazyAny) ToUint32() uint32 { + return 0 +} + +func (any *objectLazyAny) ToUint64() uint64 { + return 0 +} + +func (any *objectLazyAny) ToFloat32() float32 { + return 0 +} + +func (any *objectLazyAny) ToFloat64() float64 { + return 0 +} + +func (any *objectLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *objectLazyAny) ToVal(obj interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer 
any.cfg.ReturnIterator(iter) + iter.ReadVal(obj) +} + +func (any *objectLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateObjectField(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + mapped := locatePath(iter, path[1:]) + if mapped.ValueType() != InvalidValue { + mappedAll[field] = mapped + } + return true + }) + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectLazyAny) Keys() []string { + keys := []string{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + iter.Skip() + keys = append(keys, field) + return true + }) + return keys +} + +func (any *objectLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + size++ + return true + }) + return size +} + +func (any *objectLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *objectLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type objectAny struct { + baseAny + err error + val reflect.Value +} + +func wrapStruct(val interface{}) *objectAny { + return &objectAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *objectAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectAny) MustBeValid() Any { + return any +} + +func (any *objectAny) Parse() *Iterator { + return nil +} + +func (any *objectAny) LastError() error { + return any.err +} + +func (any *objectAny) ToBool() bool { + return any.val.NumField() != 0 +} + +func (any *objectAny) ToInt() int { + return 0 +} + +func (any *objectAny) ToInt32() int32 { + return 0 +} + +func (any *objectAny) ToInt64() int64 { + return 0 +} + +func (any *objectAny) ToUint() uint { + return 0 +} + +func (any *objectAny) ToUint32() uint32 { + return 0 +} + +func (any *objectAny) ToUint64() uint64 { + return 0 +} + +func (any *objectAny) ToFloat32() float32 { + return 0 +} + +func (any *objectAny) ToFloat64() float64 { + return 0 +} + +func (any *objectAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *objectAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + field := any.val.FieldByName(firstPath) + if !field.IsValid() { + return newInvalidAny(path) + } + return Wrap(field.Interface()) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for i := 0; i < any.val.NumField(); i++ { + field := any.val.Field(i) + if field.CanInterface() { + mapped := Wrap(field.Interface()).Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll[any.val.Type().Field(i).Name] = mapped + } + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectAny) Keys() []string { + keys := make([]string, 0, any.val.NumField()) + for i := 0; i < any.val.NumField(); i++ { + keys = append(keys, any.val.Type().Field(i).Name) + } + return keys +} + +func (any *objectAny) Size() int { + return any.val.NumField() +} + +func (any *objectAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *objectAny) GetInterface() interface{} { + return any.val.Interface() +} + +type mapAny struct { + baseAny + err error + val reflect.Value +} + +func wrapMap(val interface{}) *mapAny { + return &mapAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *mapAny) ValueType() ValueType { + return ObjectValue +} + +func (any *mapAny) MustBeValid() Any { + return any +} + +func (any *mapAny) Parse() *Iterator { + return nil +} + +func (any *mapAny) LastError() error { + return any.err +} + +func (any *mapAny) ToBool() bool { + return true +} + +func (any *mapAny) ToInt() int { + return 0 +} + +func (any *mapAny) ToInt32() int32 { + return 0 +} + +func (any *mapAny) ToInt64() int64 { + return 0 +} + +func (any *mapAny) ToUint() uint { + return 0 +} + +func (any *mapAny) ToUint32() uint32 { + return 0 +} + +func (any *mapAny) ToUint64() uint64 { + return 0 +} + +func (any *mapAny) ToFloat32() float32 { + return 0 +} + +func (any *mapAny) ToFloat64() float64 { + return 0 +} + +func (any *mapAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *mapAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for _, key := range any.val.MapKeys() { + keyAsStr := key.String() + element := Wrap(any.val.MapIndex(key).Interface()) + mapped := element.Get(path[1:]...) 
+ if mapped.ValueType() != InvalidValue { + mappedAll[keyAsStr] = mapped + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + value := any.val.MapIndex(reflect.ValueOf(firstPath)) + if !value.IsValid() { + return newInvalidAny(path) + } + return Wrap(value.Interface()) + } +} + +func (any *mapAny) Keys() []string { + keys := make([]string, 0, any.val.Len()) + for _, key := range any.val.MapKeys() { + keys = append(keys, key.String()) + } + return keys +} + +func (any *mapAny) Size() int { + return any.val.Len() +} + +func (any *mapAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *mapAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_string.go b/vendor/github.com/json-iterator/go/feature_any_string.go new file mode 100644 index 000000000..abf060bd5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_string.go @@ -0,0 +1,166 @@ +package jsoniter + +import ( + "fmt" + "strconv" +) + +type stringAny struct { + baseAny + val string +} + +func (any *stringAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)} +} + +func (any *stringAny) Parse() *Iterator { + return nil +} + +func (any *stringAny) ValueType() ValueType { + return StringValue +} + +func (any *stringAny) MustBeValid() Any { + return any +} + +func (any *stringAny) LastError() error { + return nil +} + +func (any *stringAny) ToBool() bool { + str := any.ToString() + if str == "0" { + return false + } + for _, c := range str { + switch c { + case ' ', '\n', '\r', '\t': + default: + return true + } + } + return false +} + +func (any *stringAny) ToInt() int { + return int(any.ToInt64()) + +} + +func (any *stringAny) ToInt32() int32 { + return int32(any.ToInt64()) +} + +func (any *stringAny) ToInt64() int64 { + if any.val == "" { + return 0 + } + + flag := 1 + startPos := 0 + endPos := 0 + if any.val[0] == '+' || any.val[0] == '-' { + startPos = 1 + } + + if any.val[0] == '-' { + flag = -1 + } + + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64) + return int64(flag) * parsed +} + +func (any *stringAny) ToUint() uint { + return uint(any.ToUint64()) +} + +func (any *stringAny) ToUint32() uint32 { + return uint32(any.ToUint64()) +} + +func (any *stringAny) ToUint64() uint64 { + if any.val == "" { + return 0 + } + + startPos := 0 + endPos := 0 + + if any.val[0] == '-' { + return 0 + } + if any.val[0] == '+' { + startPos = 1 + } + + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64) + return parsed +} + +func (any *stringAny) ToFloat32() float32 { + return float32(any.ToFloat64()) +} + +func (any *stringAny) ToFloat64() float64 { + if len(any.val) == 0 { + return 0 + } + + // first char invalid + if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') { + return 0 + } + + // extract valid num expression from string + // eg 123true => 123, -12.12xxa => -12.12 + endPos := 1 + for i := 1; i < len(any.val); i++ { + if any.val[i] == '.' 
|| any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' { + endPos = i + 1 + continue + } + + // end position is the first char which is not digit + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + endPos = i + break + } + } + parsed, _ := strconv.ParseFloat(any.val[:endPos], 64) + return parsed +} + +func (any *stringAny) ToString() string { + return any.val +} + +func (any *stringAny) WriteTo(stream *Stream) { + stream.WriteString(any.val) +} + +func (any *stringAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_uint32.go b/vendor/github.com/json-iterator/go/feature_any_uint32.go new file mode 100644 index 000000000..656bbd33d --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_uint32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint32Any struct { + baseAny + val uint32 +} + +func (any *uint32Any) LastError() error { + return nil +} + +func (any *uint32Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint32Any) MustBeValid() Any { + return any +} + +func (any *uint32Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint32Any) ToInt() int { + return int(any.val) +} + +func (any *uint32Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint32Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint32Any) ToUint32() uint32 { + return any.val +} + +func (any *uint32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *uint32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *uint32Any) WriteTo(stream *Stream) { + stream.WriteUint32(any.val) +} + +func (any *uint32Any) Parse() *Iterator { + return nil +} + +func (any *uint32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_uint64.go b/vendor/github.com/json-iterator/go/feature_any_uint64.go new file mode 100644 index 000000000..7df2fce33 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_uint64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint64Any struct { + baseAny + val uint64 +} + +func (any *uint64Any) LastError() error { + return nil +} + +func (any *uint64Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint64Any) MustBeValid() Any { + return any +} + +func (any *uint64Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint64Any) ToInt() int { + return int(any.val) +} + +func (any *uint64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint64Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint64Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *uint64Any) ToUint64() uint64 { + return any.val +} + +func (any *uint64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint64Any) ToString() string { + return strconv.FormatUint(any.val, 10) +} + +func (any *uint64Any) WriteTo(stream *Stream) { + stream.WriteUint64(any.val) +} + +func (any *uint64Any) Parse() *Iterator { + return nil +} + +func (any *uint64Any) GetInterface() 
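The stringAny conversions above deliberately parse only the leading numeric prefix of a string and ignore trailing characters, and Wrap (defined earlier in feature_any.go) gives plain Go values the same Any surface. A small sketch, using only exported helpers shown in this section:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// WrapString produces the stringAny type above; "-12.12xxa" parses as -12.12 / -12,
	// matching the comment in stringAny.ToFloat64.
	s := jsoniter.WrapString("-12.12xxa")
	fmt.Println(s.ToFloat64(), s.ToInt64()) // -12.12 -12

	// Wrap dispatches on reflect.Kind, so ints, slices, maps and structs all
	// become Any values with the same conversion methods.
	n := jsoniter.Wrap(42)
	fmt.Println(n.ToString(), n.ToBool()) // 42 true
}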
interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_config.go b/vendor/github.com/json-iterator/go/feature_config.go new file mode 100644 index 000000000..140679536 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_config.go @@ -0,0 +1,347 @@ +package jsoniter + +import ( + "encoding/json" + "errors" + "io" + "reflect" + "sync/atomic" + "unsafe" +) + +// Config customize how the API should behave. +// The API is created from Config by Froze. +type Config struct { + IndentionStep int + MarshalFloatWith6Digits bool + EscapeHTML bool + SortMapKeys bool + UseNumber bool + TagKey string + ValidateJsonRawMessage bool + ObjectFieldMustBeSimpleString bool +} + +type frozenConfig struct { + configBeforeFrozen Config + sortMapKeys bool + indentionStep int + objectFieldMustBeSimpleString bool + decoderCache unsafe.Pointer + encoderCache unsafe.Pointer + extensions []Extension + streamPool chan *Stream + iteratorPool chan *Iterator +} + +// API the public interface of this package. +// Primary Marshal and Unmarshal. +type API interface { + IteratorPool + StreamPool + MarshalToString(v interface{}) (string, error) + Marshal(v interface{}) ([]byte, error) + MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) + UnmarshalFromString(str string, v interface{}) error + Unmarshal(data []byte, v interface{}) error + Get(data []byte, path ...interface{}) Any + NewEncoder(writer io.Writer) *Encoder + NewDecoder(reader io.Reader) *Decoder + Valid(data []byte) bool +} + +// ConfigDefault the default API +var ConfigDefault = Config{ + EscapeHTML: true, +}.Froze() + +// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior +var ConfigCompatibleWithStandardLibrary = Config{ + EscapeHTML: true, + SortMapKeys: true, + ValidateJsonRawMessage: true, +}.Froze() + +// ConfigFastest marshals float with only 6 digits precision +var ConfigFastest = Config{ + EscapeHTML: false, + MarshalFloatWith6Digits: true, // will lose precession + ObjectFieldMustBeSimpleString: true, // do not unescape object field +}.Froze() + +// Froze forge API from config +func (cfg Config) Froze() API { + // TODO: cache frozen config + frozenConfig := &frozenConfig{ + sortMapKeys: cfg.SortMapKeys, + indentionStep: cfg.IndentionStep, + objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString, + streamPool: make(chan *Stream, 16), + iteratorPool: make(chan *Iterator, 16), + } + atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{})) + atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{})) + if cfg.MarshalFloatWith6Digits { + frozenConfig.marshalFloatWith6Digits() + } + if cfg.EscapeHTML { + frozenConfig.escapeHTML() + } + if cfg.UseNumber { + frozenConfig.useNumber() + } + if cfg.ValidateJsonRawMessage { + frozenConfig.validateJsonRawMessage() + } + frozenConfig.configBeforeFrozen = cfg + return frozenConfig +} + +func (cfg *frozenConfig) validateJsonRawMessage() { + encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) { + rawMessage := *(*json.RawMessage)(ptr) + iter := cfg.BorrowIterator([]byte(rawMessage)) + iter.Read() + if iter.Error != nil { + stream.WriteRaw("null") + } else { + cfg.ReturnIterator(iter) + stream.WriteRaw(string(rawMessage)) + } + }, func(ptr unsafe.Pointer) bool { + return false + }} + cfg.addEncoderToCache(reflect.TypeOf((*json.RawMessage)(nil)).Elem(), encoder) + cfg.addEncoderToCache(reflect.TypeOf((*RawMessage)(nil)).Elem(), 
encoder) +} + +func (cfg *frozenConfig) useNumber() { + cfg.addDecoderToCache(reflect.TypeOf((*interface{})(nil)).Elem(), &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) { + if iter.WhatIsNext() == NumberValue { + *((*interface{})(ptr)) = json.Number(iter.readNumberAsString()) + } else { + *((*interface{})(ptr)) = iter.Read() + } + }}) +} +func (cfg *frozenConfig) getTagKey() string { + tagKey := cfg.configBeforeFrozen.TagKey + if tagKey == "" { + return "json" + } + return tagKey +} + +func (cfg *frozenConfig) registerExtension(extension Extension) { + cfg.extensions = append(cfg.extensions, extension) +} + +type lossyFloat32Encoder struct { +} + +func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32Lossy(*((*float32)(ptr))) +} + +func (encoder *lossyFloat32Encoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type lossyFloat64Encoder struct { +} + +func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64Lossy(*((*float64)(ptr))) +} + +func (encoder *lossyFloat64Encoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +// EnableLossyFloatMarshalling keeps 10**(-6) precision +// for float variables for better performance. +func (cfg *frozenConfig) marshalFloatWith6Digits() { + // for better performance + cfg.addEncoderToCache(reflect.TypeOf((*float32)(nil)).Elem(), &lossyFloat32Encoder{}) + cfg.addEncoderToCache(reflect.TypeOf((*float64)(nil)).Elem(), &lossyFloat64Encoder{}) +} + +type htmlEscapedStringEncoder struct { +} + +func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteStringWithHTMLEscaped(str) +} + +func (encoder *htmlEscapedStringEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +func (cfg *frozenConfig) escapeHTML() { + cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{}) +} + +func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) { + done := false + for !done { + ptr := atomic.LoadPointer(&cfg.decoderCache) + cache := *(*map[reflect.Type]ValDecoder)(ptr) + copied := map[reflect.Type]ValDecoder{} + for k, v := range cache { + copied[k] = v + } + copied[cacheKey] = decoder + done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied)) + } +} + +func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) { + done := false + for !done { + ptr := atomic.LoadPointer(&cfg.encoderCache) + cache := *(*map[reflect.Type]ValEncoder)(ptr) + copied := map[reflect.Type]ValEncoder{} + for k, v := range cache { + copied[k] = v + } + copied[cacheKey] = encoder + done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied)) + } +} + +func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder { + ptr := atomic.LoadPointer(&cfg.decoderCache) + cache := *(*map[reflect.Type]ValDecoder)(ptr) + return cache[cacheKey] +} + +func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) 
ValEncoder { + ptr := atomic.LoadPointer(&cfg.encoderCache) + cache := *(*map[reflect.Type]ValEncoder)(ptr) + return cache[cacheKey] +} + +func (cfg *frozenConfig) cleanDecoders() { + typeDecoders = map[string]ValDecoder{} + fieldDecoders = map[string]ValDecoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) cleanEncoders() { + typeEncoders = map[string]ValEncoder{} + fieldEncoders = map[string]ValEncoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return "", stream.Error + } + return string(stream.Buffer()), nil +} + +func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return nil, stream.Error + } + result := stream.Buffer() + copied := make([]byte, len(result)) + copy(copied, result) + return copied, nil +} + +func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + if prefix != "" { + panic("prefix is not supported") + } + for _, r := range indent { + if r != ' ' { + panic("indent can only be space") + } + } + newCfg := cfg.configBeforeFrozen + newCfg.IndentionStep = len(indent) + return newCfg.Froze().Marshal(v) +} + +func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error { + data := []byte(str) + data = data[:lastNotSpacePos(data)] + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.ReadVal(v) + if iter.head == iter.tail { + iter.loadMore() + } + if iter.Error == io.EOF { + return nil + } + if iter.Error == nil { + iter.ReportError("UnmarshalFromString", "there are bytes left after unmarshal") + } + return iter.Error +} + +func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + return locatePath(iter, path) +} + +func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error { + data = data[:lastNotSpacePos(data)] + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + typ := reflect.TypeOf(v) + if typ.Kind() != reflect.Ptr { + // return non-pointer error + return errors.New("the second param must be ptr type") + } + iter.ReadVal(v) + if iter.head == iter.tail { + iter.loadMore() + } + if iter.Error == io.EOF { + return nil + } + if iter.Error == nil { + iter.ReportError("Unmarshal", "there are bytes left after unmarshal") + } + return iter.Error +} + +func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder { + stream := NewStream(cfg, writer, 512) + return &Encoder{stream} +} + +func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder { + iter := Parse(cfg, reader, 512) + return &Decoder{iter} +} + +func (cfg *frozenConfig) Valid(data []byte) bool { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.Skip() + return iter.Error == nil +} diff --git a/vendor/github.com/json-iterator/go/feature_iter.go b/vendor/github.com/json-iterator/go/feature_iter.go new file mode 100644 index 000000000..95ae54fbf --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter.go @@ -0,0 +1,322 @@ +package jsoniter + +import ( + "encoding/json" + "fmt" + "io" +) + +// ValueType the type for JSON element +type ValueType int + +const ( + // InvalidValue invalid JSON element + InvalidValue 
ValueType = iota + // StringValue JSON element "string" + StringValue + // NumberValue JSON element 100 or 0.10 + NumberValue + // NilValue JSON element null + NilValue + // BoolValue JSON element true or false + BoolValue + // ArrayValue JSON element [] + ArrayValue + // ObjectValue JSON element {} + ObjectValue +) + +var hexDigits []byte +var valueTypes []ValueType + +func init() { + hexDigits = make([]byte, 256) + for i := 0; i < len(hexDigits); i++ { + hexDigits[i] = 255 + } + for i := '0'; i <= '9'; i++ { + hexDigits[i] = byte(i - '0') + } + for i := 'a'; i <= 'f'; i++ { + hexDigits[i] = byte((i - 'a') + 10) + } + for i := 'A'; i <= 'F'; i++ { + hexDigits[i] = byte((i - 'A') + 10) + } + valueTypes = make([]ValueType, 256) + for i := 0; i < len(valueTypes); i++ { + valueTypes[i] = InvalidValue + } + valueTypes['"'] = StringValue + valueTypes['-'] = NumberValue + valueTypes['0'] = NumberValue + valueTypes['1'] = NumberValue + valueTypes['2'] = NumberValue + valueTypes['3'] = NumberValue + valueTypes['4'] = NumberValue + valueTypes['5'] = NumberValue + valueTypes['6'] = NumberValue + valueTypes['7'] = NumberValue + valueTypes['8'] = NumberValue + valueTypes['9'] = NumberValue + valueTypes['t'] = BoolValue + valueTypes['f'] = BoolValue + valueTypes['n'] = NilValue + valueTypes['['] = ArrayValue + valueTypes['{'] = ObjectValue +} + +// Iterator is a io.Reader like object, with JSON specific read functions. +// Error is not returned as return value, but stored as Error member on this iterator instance. +type Iterator struct { + cfg *frozenConfig + reader io.Reader + buf []byte + head int + tail int + captureStartedAt int + captured []byte + Error error + Attachment interface{} // open for customized decoder +} + +// NewIterator creates an empty Iterator instance +func NewIterator(cfg API) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: nil, + head: 0, + tail: 0, + } +} + +// Parse creates an Iterator instance from io.Reader +func Parse(cfg API, reader io.Reader, bufSize int) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: reader, + buf: make([]byte, bufSize), + head: 0, + tail: 0, + } +} + +// ParseBytes creates an Iterator instance from byte array +func ParseBytes(cfg API, input []byte) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: input, + head: 0, + tail: len(input), + } +} + +// ParseString creates an Iterator instance from string +func ParseString(cfg API, input string) *Iterator { + return ParseBytes(cfg, []byte(input)) +} + +// Pool returns a pool can provide more iterator with same configuration +func (iter *Iterator) Pool() IteratorPool { + return iter.cfg +} + +// Reset reuse iterator instance by specifying another reader +func (iter *Iterator) Reset(reader io.Reader) *Iterator { + iter.reader = reader + iter.head = 0 + iter.tail = 0 + return iter +} + +// ResetBytes reuse iterator instance by specifying another byte array as input +func (iter *Iterator) ResetBytes(input []byte) *Iterator { + iter.reader = nil + iter.buf = input + iter.head = 0 + iter.tail = len(input) + return iter +} + +// WhatIsNext gets ValueType of relatively next json element +func (iter *Iterator) WhatIsNext() ValueType { + valueType := valueTypes[iter.nextToken()] + iter.unreadByte() + return valueType +} + +func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + return 
false + } + return true +} + +func (iter *Iterator) isObjectEnd() bool { + c := iter.nextToken() + if c == ',' { + return false + } + if c == '}' { + return true + } + iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c})) + return true +} + +func (iter *Iterator) nextToken() byte { + // a variation of skip whitespaces, returning the next non-whitespace token + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + 1 + return c + } + if !iter.loadMore() { + return 0 + } + } +} + +// ReportError record a error in iterator instance with current position. +func (iter *Iterator) ReportError(operation string, msg string) { + if iter.Error != nil { + if iter.Error != io.EOF { + return + } + } + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + peekEnd := iter.head + 10 + if peekEnd > iter.tail { + peekEnd = iter.tail + } + parsing := string(iter.buf[peekStart:peekEnd]) + contextStart := iter.head - 50 + if contextStart < 0 { + contextStart = 0 + } + contextEnd := iter.head + 50 + if contextEnd > iter.tail { + contextEnd = iter.tail + } + context := string(iter.buf[contextStart:contextEnd]) + iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...", + operation, msg, iter.head-peekStart, parsing, context) +} + +// CurrentBuffer gets current buffer as string for debugging purpose +func (iter *Iterator) CurrentBuffer() string { + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head, + string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail])) +} + +func (iter *Iterator) readByte() (ret byte) { + if iter.head == iter.tail { + if iter.loadMore() { + ret = iter.buf[iter.head] + iter.head++ + return ret + } + return 0 + } + ret = iter.buf[iter.head] + iter.head++ + return ret +} + +func (iter *Iterator) loadMore() bool { + if iter.reader == nil { + if iter.Error == nil { + iter.head = iter.tail + iter.Error = io.EOF + } + return false + } + if iter.captured != nil { + iter.captured = append(iter.captured, + iter.buf[iter.captureStartedAt:iter.tail]...) + iter.captureStartedAt = 0 + } + for { + n, err := iter.reader.Read(iter.buf) + if n == 0 { + if err != nil { + if iter.Error == nil { + iter.Error = err + } + return false + } + } else { + iter.head = 0 + iter.tail = n + return true + } + } +} + +func (iter *Iterator) unreadByte() { + if iter.Error != nil { + return + } + iter.head-- + return +} + +// Read read the next JSON element as generic interface{}. 
+func (iter *Iterator) Read() interface{} { + valueType := iter.WhatIsNext() + switch valueType { + case StringValue: + return iter.ReadString() + case NumberValue: + if iter.cfg.configBeforeFrozen.UseNumber { + return json.Number(iter.readNumberAsString()) + } + return iter.ReadFloat64() + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + return nil + case BoolValue: + return iter.ReadBool() + case ArrayValue: + arr := []interface{}{} + iter.ReadArrayCB(func(iter *Iterator) bool { + var elem interface{} + iter.ReadVal(&elem) + arr = append(arr, elem) + return true + }) + return arr + case ObjectValue: + obj := map[string]interface{}{} + iter.ReadMapCB(func(Iter *Iterator, field string) bool { + var elem interface{} + iter.ReadVal(&elem) + obj[field] = elem + return true + }) + return obj + default: + iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType)) + return nil + } +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_array.go b/vendor/github.com/json-iterator/go/feature_iter_array.go new file mode 100644 index 000000000..6188cb457 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_array.go @@ -0,0 +1,58 @@ +package jsoniter + +// ReadArray read array element, tells if the array has more element to read. +func (iter *Iterator) ReadArray() (ret bool) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return false // null + case '[': + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + return true + } + return false + case ']': + return false + case ',': + return true + default: + iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c})) + return + } +} + +// ReadArrayCB read array with callback +func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) { + c := iter.nextToken() + if c == '[' { + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + if !callback(iter) { + return false + } + c = iter.nextToken() + for c == ',' { + if !callback(iter) { + return false + } + c = iter.nextToken() + } + if c != ']' { + iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c})) + return false + } + return true + } + return true + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c})) + return false +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_float.go b/vendor/github.com/json-iterator/go/feature_iter_float.go new file mode 100644 index 000000000..86f459912 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_float.go @@ -0,0 +1,341 @@ +package jsoniter + +import ( + "io" + "math/big" + "strconv" + "strings" + "unsafe" +) + +var floatDigits []int8 + +const invalidCharForNumber = int8(-1) +const endOfNumber = int8(-2) +const dotInNumber = int8(-3) + +func init() { + floatDigits = make([]int8, 256) + for i := 0; i < len(floatDigits); i++ { + floatDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + floatDigits[i] = i - int8('0') + } + floatDigits[','] = endOfNumber + floatDigits[']'] = endOfNumber + floatDigits['}'] = endOfNumber + floatDigits[' '] = endOfNumber + floatDigits['\t'] = endOfNumber + floatDigits['\n'] = endOfNumber + floatDigits['.'] = dotInNumber +} + +// ReadBigFloat read big.Float +func (iter *Iterator) ReadBigFloat() (ret *big.Float) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return 
nil + } + prec := 64 + if len(str) > prec { + prec = len(str) + } + val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero) + if err != nil { + iter.Error = err + return nil + } + return val +} + +// ReadBigInt read big.Int +func (iter *Iterator) ReadBigInt() (ret *big.Int) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + ret = big.NewInt(0) + var success bool + ret, success = ret.SetString(str, 10) + if !success { + iter.ReportError("ReadBigInt", "invalid big int") + return nil + } + return ret +} + +//ReadFloat32 read float32 +func (iter *Iterator) ReadFloat32() (ret float32) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat32() + } + iter.unreadByte() + return iter.readPositiveFloat32() +} + +func (iter *Iterator) readPositiveFloat32() (ret float32) { + value := uint64(0) + c := byte(' ') + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.ReportError("readFloat32", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat32", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat32", "leading zero is invalid") + return + } + } + value = uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.head = i + return float32(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' 
{ + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat32SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float32(float64(value) / float64(pow10[decimalPlaces])) + } + // too many decimal places + return iter.readFloat32SlowPath() + case invalidCharForNumber: + fallthrough + case dotInNumber: + return iter.readFloat32SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat32SlowPath() +} + +func (iter *Iterator) readNumberAsString() (ret string) { + strBuf := [16]byte{} + str := strBuf[0:0] +load_loop: + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + str = append(str, c) + continue + default: + iter.head = i + break load_loop + } + } + if !iter.loadMore() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + return + } + if len(str) == 0 { + iter.ReportError("readNumberAsString", "invalid number") + } + return *(*string)(unsafe.Pointer(&str)) +} + +func (iter *Iterator) readFloat32SlowPath() (ret float32) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat32SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 32) + if err != nil { + iter.Error = err + return + } + return float32(val) +} + +// ReadFloat64 read float64 +func (iter *Iterator) ReadFloat64() (ret float64) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat64() + } + iter.unreadByte() + return iter.readPositiveFloat64() +} + +func (iter *Iterator) readPositiveFloat64() (ret float64) { + value := uint64(0) + c := byte(' ') + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.ReportError("readFloat64", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat64", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat64", "leading zero is invalid") + return + } + } + value = uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.head = i + return float64(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' 
{ + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat64SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float64(value) / float64(pow10[decimalPlaces]) + } + // too many decimal places + return iter.readFloat64SlowPath() + case invalidCharForNumber: + fallthrough + case dotInNumber: + return iter.readFloat64SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat64SlowPath() +} + +func (iter *Iterator) readFloat64SlowPath() (ret float64) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat64SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 64) + if err != nil { + iter.Error = err + return + } + return val +} + +func validateFloat(str string) string { + // strconv.ParseFloat is not validating `1.` or `1.e1` + if len(str) == 0 { + return "empty number" + } + if str[0] == '-' { + return "-- is not valid" + } + dotPos := strings.IndexByte(str, '.') + if dotPos != -1 { + if dotPos == len(str)-1 { + return "dot can not be last character" + } + switch str[dotPos+1] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + return "missing digit after dot" + } + } + return "" +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_int.go b/vendor/github.com/json-iterator/go/feature_iter_int.go new file mode 100644 index 000000000..6137348cd --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_int.go @@ -0,0 +1,268 @@ +package jsoniter + +import ( + "math" + "strconv" +) + +var intDigits []int8 + +const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1 +const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1 + +func init() { + intDigits = make([]int8, 256) + for i := 0; i < len(intDigits); i++ { + intDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + intDigits[i] = i - int8('0') + } +} + +// ReadUint read uint +func (iter *Iterator) ReadUint() uint { + return uint(iter.ReadUint64()) +} + +// ReadInt read int +func (iter *Iterator) ReadInt() int { + return int(iter.ReadInt64()) +} + +// ReadInt8 read int8 +func (iter *Iterator) ReadInt8() (ret int8) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt8+1 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int8(val) + } + val := iter.readUint32(c) + if val > math.MaxInt8 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int8(val) +} + +// ReadUint8 read uint8 +func (iter *Iterator) ReadUint8() (ret uint8) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint8 { + iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint8(val) +} + +// ReadInt16 read int16 +func (iter *Iterator) ReadInt16() (ret int16) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt16+1 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int16(val) + } + val := iter.readUint32(c) + if val > math.MaxInt16 { + 
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int16(val) +} + +// ReadUint16 read uint16 +func (iter *Iterator) ReadUint16() (ret uint16) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint16 { + iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint16(val) +} + +// ReadInt32 read int32 +func (iter *Iterator) ReadInt32() (ret int32) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt32+1 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int32(val) + } + val := iter.readUint32(c) + if val > math.MaxInt32 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int32(val) +} + +// ReadUint32 read uint32 +func (iter *Iterator) ReadUint32() (ret uint32) { + return iter.readUint32(iter.nextToken()) +} + +func (iter *Iterator) readUint32(c byte) (ret uint32) { + defer func() { + if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' { + iter.ReportError("readUint32", "can not decode float as int") + } + }() + ind := intDigits[c] + if ind == 0 { + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint32(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + return value*10 + uint32(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + return value*100 + uint32(ind2)*10 + uint32(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5) + } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8) + iter.head = i + if ind9 == invalidCharForNumber { + return value + } + } + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + return value + } + if value > uint32SafeToMultiply10 { + value2 := (value << 3) + (value << 1) + uint32(ind) + if value2 < value { + iter.ReportError("readUint32", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint32(ind) + } + if !iter.loadMore() { + return value + } + } +} + +// ReadInt64 read int64 +func (iter *Iterator) ReadInt64() (ret int64) { + c := 
iter.nextToken() + if c == '-' { + val := iter.readUint64(iter.readByte()) + if val > math.MaxInt64+1 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return -int64(val) + } + val := iter.readUint64(c) + if val > math.MaxInt64 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return int64(val) +} + +// ReadUint64 read uint64 +func (iter *Iterator) ReadUint64() uint64 { + return iter.readUint64(iter.nextToken()) +} + +func (iter *Iterator) readUint64(c byte) (ret uint64) { + defer func() { + if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' { + iter.ReportError("readUint64", "can not decode float as int") + } + }() + ind := intDigits[c] + if ind == 0 { + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint64(ind) + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + return value + } + if value > uint64SafeToMultiple10 { + value2 := (value << 3) + (value << 1) + uint64(ind) + if value2 < value { + iter.ReportError("readUint64", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint64(ind) + } + if !iter.loadMore() { + return value + } + } +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_object.go b/vendor/github.com/json-iterator/go/feature_iter_object.go new file mode 100644 index 000000000..dfd91fa60 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_object.go @@ -0,0 +1,267 @@ +package jsoniter + +import ( + "fmt" + "unicode" + "unsafe" +) + +// ReadObject read one field from object. +// If object ended, returns empty string. +// Otherwise, returns the field name. 
+func (iter *Iterator) ReadObject() (ret string) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return "" // null + case '{': + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + if iter.cfg.objectFieldMustBeSimpleString { + return string(iter.readObjectFieldAsBytes()) + } else { + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + } + } + if c == '}' { + return "" // end of object + } + iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c})) + return + case ',': + if iter.cfg.objectFieldMustBeSimpleString { + return string(iter.readObjectFieldAsBytes()) + } else { + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + } + case '}': + return "" // end of object + default: + iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c}))) + return + } +} + +func (iter *Iterator) readFieldHash() int32 { + hash := int64(0x811c9dc5) + c := iter.nextToken() + if c == '"' { + for { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + b := iter.buf[i] + if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' { + iter.head = i + for _, b := range iter.readStringSlowPath() { + if 'A' <= b && b <= 'Z' { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return int32(hash) + } + if b == '"' { + iter.head = i + 1 + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return int32(hash) + } + if 'A' <= b && b <= 'Z' { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + if !iter.loadMore() { + iter.ReportError("readFieldHash", `incomplete field name`) + return 0 + } + } + } + iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c})) + return 0 +} + +func calcHash(str string) int32 { + hash := int64(0x811c9dc5) + for _, b := range str { + hash ^= int64(unicode.ToLower(b)) + hash *= 0x1000193 + } + return int32(hash) +} + +// ReadObjectCB read object with callback, the key is ascii only and field name not copied +func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + var fieldBytes []byte + var field string + if c == '{' { + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes = iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + for c == ',' { + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes = iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + } + if c != '}' { + 
iter.ReportError("ReadObjectCB", `object not ended with }`) + return false + } + return true + } + if c == '}' { + return true + } + iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c})) + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +// ReadMapCB read map with callback, the key can be any string +func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field := iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return false + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + for c == ',' { + field = iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return false + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadMapCB", `object not ended with }`) + return false + } + return true + } + if c == '}' { + return true + } + iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c})) + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectStart() bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '}' { + return false + } + iter.unreadByte() + return true + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return false + } + iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) { + str := iter.ReadStringAsSlice() + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if iter.buf[iter.head] != ':' { + iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]})) + return + } + iter.head++ + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if ret == nil { + return str + } + return ret +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip.go b/vendor/github.com/json-iterator/go/feature_iter_skip.go new file mode 100644 index 000000000..f58beb913 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_skip.go @@ -0,0 +1,129 @@ +package jsoniter + +import "fmt" + +// ReadNil reads a json object as nil and +// returns whether it's a nil or not +func (iter *Iterator) ReadNil() (ret bool) { + c := iter.nextToken() + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') // null + return true + } + iter.unreadByte() + return false +} + +// ReadBool reads a json object as BoolValue +func (iter *Iterator) ReadBool() (ret bool) { + c := iter.nextToken() + if c == 't' { + iter.skipThreeBytes('r', 'u', 'e') + return true + } + if c == 'f' { + iter.skipFourBytes('a', 'l', 's', 'e') + return false + } + iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c})) + return +} + +// SkipAndReturnBytes 
skip next JSON element, and return its content as []byte. +// The []byte can be kept, it is a copy of data. +func (iter *Iterator) SkipAndReturnBytes() []byte { + iter.startCapture(iter.head) + iter.Skip() + return iter.stopCapture() +} + +type captureBuffer struct { + startedAt int + captured []byte +} + +func (iter *Iterator) startCapture(captureStartedAt int) { + if iter.captured != nil { + panic("already in capture mode") + } + iter.captureStartedAt = captureStartedAt + iter.captured = make([]byte, 0, 32) +} + +func (iter *Iterator) stopCapture() []byte { + if iter.captured == nil { + panic("not in capture mode") + } + captured := iter.captured + remaining := iter.buf[iter.captureStartedAt:iter.head] + iter.captureStartedAt = -1 + iter.captured = nil + if len(captured) == 0 { + copied := make([]byte, len(remaining)) + copy(copied, remaining) + return copied + } + captured = append(captured, remaining...) + return captured +} + +// Skip skips a json object and positions to relatively the next json object +func (iter *Iterator) Skip() { + c := iter.nextToken() + switch c { + case '"': + iter.skipString() + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + case '0': + iter.unreadByte() + iter.ReadFloat32() + case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.skipNumber() + case '[': + iter.skipArray() + case '{': + iter.skipObject() + default: + iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c)) + return + } +} + +func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b4 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } +} + +func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go new file mode 100644 index 000000000..8fcdc3b69 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go @@ -0,0 +1,144 @@ +//+build jsoniter_sloppy + +package jsoniter + +// sloppy but faster implementation, do not validate the input json + +func (iter *Iterator) skipNumber() { + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\r', '\t', ',', '}', ']': + iter.head = i + return + } + } + if !iter.loadMore() { + return + } + } +} + +func (iter *Iterator) skipArray() { + level := 1 + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will 
be i++ soon + case '[': // If open symbol, increase level + level++ + case ']': // If close symbol, increase level + level-- + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete array") + return + } + } +} + +func (iter *Iterator) skipObject() { + level := 1 + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '{': // If open symbol, increase level + level++ + case '}': // If close symbol, increase level + level-- + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete object") + return + } + } +} + +func (iter *Iterator) skipString() { + for { + end, escaped := iter.findStringEnd() + if end == -1 { + if !iter.loadMore() { + iter.ReportError("skipString", "incomplete string") + return + } + if escaped { + iter.head = 1 // skip the first char as last char read is \ + } + } else { + iter.head = end + return + } + } +} + +// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go +// Tries to find the end of string +// Support if string contains escaped quote symbols. +func (iter *Iterator) findStringEnd() (int, bool) { + escaped := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + if !escaped { + return i + 1, false + } + j := i - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return i + 1, true + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + } + } else if c == '\\' { + escaped = true + } + } + j := iter.tail - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return -1, false // do not end with \ + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + + } + return -1, true // end with \ +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go b/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go new file mode 100644 index 000000000..f67bc2e83 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go @@ -0,0 +1,89 @@ +//+build !jsoniter_sloppy + +package jsoniter + +import "fmt" + +func (iter *Iterator) skipNumber() { + if !iter.trySkipNumber() { + iter.unreadByte() + iter.ReadFloat32() + } +} + +func (iter *Iterator) trySkipNumber() bool { + dotFound := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + case '.': + if dotFound { + iter.ReportError("validateNumber", `more than one dot found in number`) + return true // already failed + } + if i+1 == iter.tail { + return false + } + c = iter.buf[i+1] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + iter.ReportError("validateNumber", `missing digit after dot`) + return true // already failed + } + dotFound = true + default: + switch c { + case ',', ']', '}', ' ', '\t', '\n', '\r': + if iter.head == i { + return false // if - without following digits + } + iter.head = i + return true // must be valid + } 
+ return false // may be invalid + } + } + return false +} + +func (iter *Iterator) skipString() { + if !iter.trySkipString() { + iter.unreadByte() + iter.ReadString() + } +} + +func (iter *Iterator) trySkipString() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + iter.head = i + 1 + return true // valid + } else if c == '\\' { + return false + } else if c < ' ' { + iter.ReportError("trySkipString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return true // already failed + } + } + return false +} + +func (iter *Iterator) skipObject() { + iter.unreadByte() + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + return true + }) +} + +func (iter *Iterator) skipArray() { + iter.unreadByte() + iter.ReadArrayCB(func(iter *Iterator) bool { + iter.Skip() + return true + }) +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_string.go b/vendor/github.com/json-iterator/go/feature_iter_string.go new file mode 100644 index 000000000..adc487ea8 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_string.go @@ -0,0 +1,215 @@ +package jsoniter + +import ( + "fmt" + "unicode/utf16" +) + +// ReadString read string from iterator +func (iter *Iterator) ReadString() (ret string) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + ret = string(iter.buf[iter.head:i]) + iter.head = i + 1 + return ret + } else if c == '\\' { + break + } else if c < ' ' { + iter.ReportError("ReadString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return + } + } + return iter.readStringSlowPath() + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return "" + } + iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readStringSlowPath() (ret string) { + var str []byte + var c byte + for iter.Error == nil { + c = iter.readByte() + if c == '"' { + return string(str) + } + if c == '\\' { + c = iter.readByte() + str = iter.readEscapedChar(c, str) + } else { + str = append(str, c) + } + } + iter.ReportError("readStringSlowPath", "unexpected end of input") + return +} + +func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte { + switch c { + case 'u': + r := iter.readU4() + if utf16.IsSurrogate(r) { + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != '\\' { + iter.unreadByte() + str = appendRune(str, r) + return str + } + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != 'u' { + str = appendRune(str, r) + return iter.readEscapedChar(c, str) + } + r2 := iter.readU4() + if iter.Error != nil { + return nil + } + combined := utf16.DecodeRune(r, r2) + if combined == '\uFFFD' { + str = appendRune(str, r) + str = appendRune(str, r2) + } else { + str = appendRune(str, combined) + } + } else { + str = appendRune(str, r) + } + case '"': + str = append(str, '"') + case '\\': + str = append(str, '\\') + case '/': + str = append(str, '/') + case 'b': + str = append(str, '\b') + case 'f': + str = append(str, '\f') + case 'n': + str = append(str, '\n') + case 'r': + str = append(str, '\r') + case 't': + str = append(str, '\t') + default: + iter.ReportError("readEscapedChar", + `invalid escape char after \`) + return nil + } + return str +} + +// ReadStringAsSlice read string from iterator without copying into string form. +// The []byte can not be kept, as it will change after next iterator call. 
+func (iter *Iterator) ReadStringAsSlice() (ret []byte) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + // for: field name, base64, number + if iter.buf[i] == '"' { + // fast path: reuse the underlying buffer + ret = iter.buf[iter.head:i] + iter.head = i + 1 + return ret + } + } + readLen := iter.tail - iter.head + copied := make([]byte, readLen, readLen*2) + copy(copied, iter.buf[iter.head:iter.tail]) + iter.head = iter.tail + for iter.Error == nil { + c := iter.readByte() + if c == '"' { + return copied + } + copied = append(copied, c) + } + return copied + } + iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readU4() (ret rune) { + for i := 0; i < 4; i++ { + c := iter.readByte() + if iter.Error != nil { + return + } + if c >= '0' && c <= '9' { + ret = ret*16 + rune(c-'0') + } else if c >= 'a' && c <= 'f' { + ret = ret*16 + rune(c-'a'+10) + } else if c >= 'A' && c <= 'F' { + ret = ret*16 + rune(c-'A'+10) + } else { + iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c})) + return + } + } + return ret +} + +const ( + t1 = 0x00 // 0000 0000 + tx = 0x80 // 1000 0000 + t2 = 0xC0 // 1100 0000 + t3 = 0xE0 // 1110 0000 + t4 = 0xF0 // 1111 0000 + t5 = 0xF8 // 1111 1000 + + maskx = 0x3F // 0011 1111 + mask2 = 0x1F // 0001 1111 + mask3 = 0x0F // 0000 1111 + mask4 = 0x07 // 0000 0111 + + rune1Max = 1<<7 - 1 + rune2Max = 1<<11 - 1 + rune3Max = 1<<16 - 1 + + surrogateMin = 0xD800 + surrogateMax = 0xDFFF + + maxRune = '\U0010FFFF' // Maximum valid Unicode code point. + runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character" +) + +func appendRune(p []byte, r rune) []byte { + // Negative values are erroneous. Making it unsigned addresses the problem. + switch i := uint32(r); { + case i <= rune1Max: + p = append(p, byte(r)) + return p + case i <= rune2Max: + p = append(p, t2|byte(r>>6)) + p = append(p, tx|byte(r)&maskx) + return p + case i > maxRune, surrogateMin <= i && i <= surrogateMax: + r = runeError + fallthrough + case i <= rune3Max: + p = append(p, t3|byte(r>>12)) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + default: + p = append(p, t4|byte(r>>18)) + p = append(p, tx|byte(r>>12)&maskx) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + } +} diff --git a/vendor/github.com/json-iterator/go/feature_json_number.go b/vendor/github.com/json-iterator/go/feature_json_number.go new file mode 100644 index 000000000..e187b200a --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_json_number.go @@ -0,0 +1,31 @@ +package jsoniter + +import ( + "encoding/json" + "strconv" +) + +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. 
+func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +func CastJsonNumber(val interface{}) (string, bool) { + switch typedVal := val.(type) { + case json.Number: + return string(typedVal), true + case Number: + return string(typedVal), true + } + return "", false +} diff --git a/vendor/github.com/json-iterator/go/feature_pool.go b/vendor/github.com/json-iterator/go/feature_pool.go new file mode 100644 index 000000000..52d38e685 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_pool.go @@ -0,0 +1,59 @@ +package jsoniter + +import ( + "io" +) + +// IteratorPool a thread safe pool of iterators with same configuration +type IteratorPool interface { + BorrowIterator(data []byte) *Iterator + ReturnIterator(iter *Iterator) +} + +// StreamPool a thread safe pool of streams with same configuration +type StreamPool interface { + BorrowStream(writer io.Writer) *Stream + ReturnStream(stream *Stream) +} + +func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream { + select { + case stream := <-cfg.streamPool: + stream.Reset(writer) + return stream + default: + return NewStream(cfg, writer, 512) + } +} + +func (cfg *frozenConfig) ReturnStream(stream *Stream) { + stream.Error = nil + stream.Attachment = nil + select { + case cfg.streamPool <- stream: + return + default: + return + } +} + +func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator { + select { + case iter := <-cfg.iteratorPool: + iter.ResetBytes(data) + return iter + default: + return ParseBytes(cfg, data) + } +} + +func (cfg *frozenConfig) ReturnIterator(iter *Iterator) { + iter.Error = nil + iter.Attachment = nil + select { + case cfg.iteratorPool <- iter: + return + default: + return + } +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect.go b/vendor/github.com/json-iterator/go/feature_reflect.go new file mode 100644 index 000000000..1bd8987f2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect.go @@ -0,0 +1,721 @@ +package jsoniter + +import ( + "encoding" + "encoding/json" + "fmt" + "reflect" + "time" + "unsafe" +) + +// ValDecoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValDecoder with json.Decoder. +// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link). +// +// Reflection on type to create decoders, which is then cached +// Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions +// 1. create instance of new value, for example *int will need a int to be allocated +// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New +// 3. assignment to map, both key and value will be reflect.Value +// For a simple struct binding, it will be reflect.Value free and allocation free +type ValDecoder interface { + Decode(ptr unsafe.Pointer, iter *Iterator) +} + +// ValEncoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValEncoder with json.Encoder. +// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link). 
+type ValEncoder interface { + IsEmpty(ptr unsafe.Pointer) bool + Encode(ptr unsafe.Pointer, stream *Stream) + EncodeInterface(val interface{}, stream *Stream) +} + +type checkIsEmpty interface { + IsEmpty(ptr unsafe.Pointer) bool +} + +// WriteToStream the default implementation for TypeEncoder method EncodeInterface +func WriteToStream(val interface{}, stream *Stream, encoder ValEncoder) { + e := (*emptyInterface)(unsafe.Pointer(&val)) + if e.word == nil { + stream.WriteNil() + return + } + if reflect.TypeOf(val).Kind() == reflect.Ptr { + encoder.Encode(unsafe.Pointer(&e.word), stream) + } else { + encoder.Encode(e.word, stream) + } +} + +var jsonNumberType reflect.Type +var jsoniterNumberType reflect.Type +var jsonRawMessageType reflect.Type +var jsoniterRawMessageType reflect.Type +var anyType reflect.Type +var marshalerType reflect.Type +var unmarshalerType reflect.Type +var textMarshalerType reflect.Type +var textUnmarshalerType reflect.Type + +func init() { + jsonNumberType = reflect.TypeOf((*json.Number)(nil)).Elem() + jsoniterNumberType = reflect.TypeOf((*Number)(nil)).Elem() + jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem() + jsoniterRawMessageType = reflect.TypeOf((*RawMessage)(nil)).Elem() + anyType = reflect.TypeOf((*Any)(nil)).Elem() + marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem() + unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() + textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() + textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() +} + +type OptionalDecoder struct { + ValueType reflect.Type + ValueDecoder ValDecoder +} + +func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + *((*unsafe.Pointer)(ptr)) = nil + } else { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + value := reflect.New(decoder.ValueType) + newPtr := extractInterface(value.Interface()).word + decoder.ValueDecoder.Decode(newPtr, iter) + *((*uintptr)(ptr)) = uintptr(newPtr) + } else { + //reuse existing instance + decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } + } +} + +type deferenceDecoder struct { + // only to deference a pointer + valueType reflect.Type + valueDecoder ValDecoder +} + +func (decoder *deferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + value := reflect.New(decoder.valueType) + newPtr := extractInterface(value.Interface()).word + decoder.valueDecoder.Decode(newPtr, iter) + *((*uintptr)(ptr)) = uintptr(newPtr) + } else { + //reuse existing instance + decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } +} + +type OptionalEncoder struct { + ValueEncoder ValEncoder +} + +func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*unsafe.Pointer)(ptr)) == nil +} + +type optionalMapEncoder struct { + valueEncoder ValEncoder +} + +func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + 
encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + p := *((*unsafe.Pointer)(ptr)) + return p == nil || encoder.valueEncoder.IsEmpty(p) +} + +type placeholderEncoder struct { + cfg *frozenConfig + cacheKey reflect.Type +} + +func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.getRealEncoder().Encode(ptr, stream) +} + +func (encoder *placeholderEncoder) EncodeInterface(val interface{}, stream *Stream) { + encoder.getRealEncoder().EncodeInterface(val, stream) +} + +func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.getRealEncoder().IsEmpty(ptr) +} + +func (encoder *placeholderEncoder) getRealEncoder() ValEncoder { + for i := 0; i < 500; i++ { + realDecoder := encoder.cfg.getEncoderFromCache(encoder.cacheKey) + _, isPlaceholder := realDecoder.(*placeholderEncoder) + if isPlaceholder { + time.Sleep(10 * time.Millisecond) + } else { + return realDecoder + } + } + panic(fmt.Sprintf("real encoder not found for cache key: %v", encoder.cacheKey)) +} + +type placeholderDecoder struct { + cfg *frozenConfig + cacheKey reflect.Type +} + +func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + for i := 0; i < 500; i++ { + realDecoder := decoder.cfg.getDecoderFromCache(decoder.cacheKey) + _, isPlaceholder := realDecoder.(*placeholderDecoder) + if isPlaceholder { + time.Sleep(10 * time.Millisecond) + } else { + realDecoder.Decode(ptr, iter) + return + } + } + panic(fmt.Sprintf("real decoder not found for cache key: %v", decoder.cacheKey)) +} + +// emptyInterface is the header for an interface{} value. 
+type emptyInterface struct { + typ unsafe.Pointer + word unsafe.Pointer +} + +// emptyInterface is the header for an interface with method (not interface{}) +type nonEmptyInterface struct { + // see ../runtime/iface.go:/Itab + itab *struct { + ityp unsafe.Pointer // static interface type + typ unsafe.Pointer // dynamic concrete type + link unsafe.Pointer + bad int32 + unused int32 + fun [100000]unsafe.Pointer // method table + } + word unsafe.Pointer +} + +// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal +func (iter *Iterator) ReadVal(obj interface{}) { + typ := reflect.TypeOf(obj) + cacheKey := typ.Elem() + decoder, err := decoderOfType(iter.cfg, cacheKey) + if err != nil { + iter.Error = err + return + } + e := (*emptyInterface)(unsafe.Pointer(&obj)) + decoder.Decode(e.word, iter) +} + +// WriteVal copy the go interface into underlying JSON, same as json.Marshal +func (stream *Stream) WriteVal(val interface{}) { + if nil == val { + stream.WriteNil() + return + } + typ := reflect.TypeOf(val) + cacheKey := typ + encoder, err := encoderOfType(stream.cfg, cacheKey) + if err != nil { + stream.Error = err + return + } + encoder.EncodeInterface(val, stream) +} + +type prefix string + +func (p prefix) addToDecoder(decoder ValDecoder, err error) (ValDecoder, error) { + if err != nil { + return nil, fmt.Errorf("%s: %s", p, err.Error()) + } + return decoder, err +} + +func (p prefix) addToEncoder(encoder ValEncoder, err error) (ValEncoder, error) { + if err != nil { + return nil, fmt.Errorf("%s: %s", p, err.Error()) + } + return encoder, err +} + +func decoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + cacheKey := typ + decoder := cfg.getDecoderFromCache(cacheKey) + if decoder != nil { + return decoder, nil + } + decoder = getTypeDecoderFromExtension(typ) + if decoder != nil { + cfg.addDecoderToCache(cacheKey, decoder) + return decoder, nil + } + decoder = &placeholderDecoder{cfg: cfg, cacheKey: cacheKey} + cfg.addDecoderToCache(cacheKey, decoder) + decoder, err := createDecoderOfType(cfg, typ) + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + cfg.addDecoderToCache(cacheKey, decoder) + return decoder, err +} + +func createDecoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + typeName := typ.String() + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{}, nil + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{}, nil + } + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{}, nil + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{}, nil + } + if typ.Implements(unmarshalerType) { + templateInterface := reflect.New(typ).Elem().Interface() + var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)} + if typ.Kind() == reflect.Ptr { + decoder = &OptionalDecoder{typ.Elem(), decoder} + } + return decoder, nil + } + if reflect.PtrTo(typ).Implements(unmarshalerType) { + templateInterface := reflect.New(typ).Interface() + var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)} + return decoder, nil + } + if typ.Implements(textUnmarshalerType) { + templateInterface := reflect.New(typ).Elem().Interface() + var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)} + if typ.Kind() == reflect.Ptr { + decoder = &OptionalDecoder{typ.Elem(), decoder} + } + return decoder, nil + } + if reflect.PtrTo(typ).Implements(textUnmarshalerType) { + 
templateInterface := reflect.New(typ).Interface() + var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)} + return decoder, nil + } + if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { + sliceDecoder, err := prefix("[slice]").addToDecoder(decoderOfSlice(cfg, typ)) + if err != nil { + return nil, err + } + return &base64Codec{sliceDecoder: sliceDecoder}, nil + } + if typ.Implements(anyType) { + return &anyCodec{}, nil + } + switch typ.Kind() { + case reflect.String: + if typeName != "string" { + return decoderOfType(cfg, reflect.TypeOf((*string)(nil)).Elem()) + } + return &stringCodec{}, nil + case reflect.Int: + if typeName != "int" { + return decoderOfType(cfg, reflect.TypeOf((*int)(nil)).Elem()) + } + return &intCodec{}, nil + case reflect.Int8: + if typeName != "int8" { + return decoderOfType(cfg, reflect.TypeOf((*int8)(nil)).Elem()) + } + return &int8Codec{}, nil + case reflect.Int16: + if typeName != "int16" { + return decoderOfType(cfg, reflect.TypeOf((*int16)(nil)).Elem()) + } + return &int16Codec{}, nil + case reflect.Int32: + if typeName != "int32" { + return decoderOfType(cfg, reflect.TypeOf((*int32)(nil)).Elem()) + } + return &int32Codec{}, nil + case reflect.Int64: + if typeName != "int64" { + return decoderOfType(cfg, reflect.TypeOf((*int64)(nil)).Elem()) + } + return &int64Codec{}, nil + case reflect.Uint: + if typeName != "uint" { + return decoderOfType(cfg, reflect.TypeOf((*uint)(nil)).Elem()) + } + return &uintCodec{}, nil + case reflect.Uint8: + if typeName != "uint8" { + return decoderOfType(cfg, reflect.TypeOf((*uint8)(nil)).Elem()) + } + return &uint8Codec{}, nil + case reflect.Uint16: + if typeName != "uint16" { + return decoderOfType(cfg, reflect.TypeOf((*uint16)(nil)).Elem()) + } + return &uint16Codec{}, nil + case reflect.Uint32: + if typeName != "uint32" { + return decoderOfType(cfg, reflect.TypeOf((*uint32)(nil)).Elem()) + } + return &uint32Codec{}, nil + case reflect.Uintptr: + if typeName != "uintptr" { + return decoderOfType(cfg, reflect.TypeOf((*uintptr)(nil)).Elem()) + } + return &uintptrCodec{}, nil + case reflect.Uint64: + if typeName != "uint64" { + return decoderOfType(cfg, reflect.TypeOf((*uint64)(nil)).Elem()) + } + return &uint64Codec{}, nil + case reflect.Float32: + if typeName != "float32" { + return decoderOfType(cfg, reflect.TypeOf((*float32)(nil)).Elem()) + } + return &float32Codec{}, nil + case reflect.Float64: + if typeName != "float64" { + return decoderOfType(cfg, reflect.TypeOf((*float64)(nil)).Elem()) + } + return &float64Codec{}, nil + case reflect.Bool: + if typeName != "bool" { + return decoderOfType(cfg, reflect.TypeOf((*bool)(nil)).Elem()) + } + return &boolCodec{}, nil + case reflect.Interface: + if typ.NumMethod() == 0 { + return &emptyInterfaceCodec{}, nil + } + return &nonEmptyInterfaceCodec{}, nil + case reflect.Struct: + return prefix(fmt.Sprintf("[%s]", typeName)).addToDecoder(decoderOfStruct(cfg, typ)) + case reflect.Array: + return prefix("[array]").addToDecoder(decoderOfArray(cfg, typ)) + case reflect.Slice: + return prefix("[slice]").addToDecoder(decoderOfSlice(cfg, typ)) + case reflect.Map: + return prefix("[map]").addToDecoder(decoderOfMap(cfg, typ)) + case reflect.Ptr: + return prefix("[optional]").addToDecoder(decoderOfOptional(cfg, typ)) + default: + return nil, fmt.Errorf("unsupported type: %v", typ) + } +} + +func encoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + cacheKey := typ + encoder := cfg.getEncoderFromCache(cacheKey) + if encoder != 
nil { + return encoder, nil + } + encoder = getTypeEncoderFromExtension(typ) + if encoder != nil { + cfg.addEncoderToCache(cacheKey, encoder) + return encoder, nil + } + encoder = &placeholderEncoder{cfg: cfg, cacheKey: cacheKey} + cfg.addEncoderToCache(cacheKey, encoder) + encoder, err := createEncoderOfType(cfg, typ) + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + cfg.addEncoderToCache(cacheKey, encoder) + return encoder, err +} + +func createEncoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{}, nil + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{}, nil + } + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{}, nil + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{}, nil + } + if typ.Implements(marshalerType) { + checkIsEmpty, err := createCheckIsEmpty(typ) + if err != nil { + return nil, err + } + templateInterface := reflect.New(typ).Elem().Interface() + var encoder ValEncoder = &marshalerEncoder{ + templateInterface: extractInterface(templateInterface), + checkIsEmpty: checkIsEmpty, + } + if typ.Kind() == reflect.Ptr { + encoder = &OptionalEncoder{encoder} + } + return encoder, nil + } + if reflect.PtrTo(typ).Implements(marshalerType) { + checkIsEmpty, err := createCheckIsEmpty(reflect.PtrTo(typ)) + if err != nil { + return nil, err + } + templateInterface := reflect.New(typ).Interface() + var encoder ValEncoder = &marshalerEncoder{ + templateInterface: extractInterface(templateInterface), + checkIsEmpty: checkIsEmpty, + } + return encoder, nil + } + if typ.Implements(textMarshalerType) { + checkIsEmpty, err := createCheckIsEmpty(typ) + if err != nil { + return nil, err + } + templateInterface := reflect.New(typ).Elem().Interface() + var encoder ValEncoder = &textMarshalerEncoder{ + templateInterface: extractInterface(templateInterface), + checkIsEmpty: checkIsEmpty, + } + if typ.Kind() == reflect.Ptr { + encoder = &OptionalEncoder{encoder} + } + return encoder, nil + } + if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { + return &base64Codec{}, nil + } + if typ.Implements(anyType) { + return &anyCodec{}, nil + } + return createEncoderOfSimpleType(cfg, typ) +} + +func createCheckIsEmpty(typ reflect.Type) (checkIsEmpty, error) { + kind := typ.Kind() + switch kind { + case reflect.String: + return &stringCodec{}, nil + case reflect.Int: + return &intCodec{}, nil + case reflect.Int8: + return &int8Codec{}, nil + case reflect.Int16: + return &int16Codec{}, nil + case reflect.Int32: + return &int32Codec{}, nil + case reflect.Int64: + return &int64Codec{}, nil + case reflect.Uint: + return &uintCodec{}, nil + case reflect.Uint8: + return &uint8Codec{}, nil + case reflect.Uint16: + return &uint16Codec{}, nil + case reflect.Uint32: + return &uint32Codec{}, nil + case reflect.Uintptr: + return &uintptrCodec{}, nil + case reflect.Uint64: + return &uint64Codec{}, nil + case reflect.Float32: + return &float32Codec{}, nil + case reflect.Float64: + return &float64Codec{}, nil + case reflect.Bool: + return &boolCodec{}, nil + case reflect.Interface: + if typ.NumMethod() == 0 { + return &emptyInterfaceCodec{}, nil + } + return &nonEmptyInterfaceCodec{}, nil + case reflect.Struct: + return &structEncoder{}, nil + case reflect.Array: + return &arrayEncoder{}, nil + case reflect.Slice: + return &sliceEncoder{}, nil + case reflect.Map: + return &mapEncoder{}, nil + case reflect.Ptr: 
+ return &OptionalEncoder{}, nil + default: + return nil, fmt.Errorf("unsupported type: %v", typ) + } +} + +func createEncoderOfSimpleType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + typeName := typ.String() + kind := typ.Kind() + switch kind { + case reflect.String: + if typeName != "string" { + return encoderOfType(cfg, reflect.TypeOf((*string)(nil)).Elem()) + } + return &stringCodec{}, nil + case reflect.Int: + if typeName != "int" { + return encoderOfType(cfg, reflect.TypeOf((*int)(nil)).Elem()) + } + return &intCodec{}, nil + case reflect.Int8: + if typeName != "int8" { + return encoderOfType(cfg, reflect.TypeOf((*int8)(nil)).Elem()) + } + return &int8Codec{}, nil + case reflect.Int16: + if typeName != "int16" { + return encoderOfType(cfg, reflect.TypeOf((*int16)(nil)).Elem()) + } + return &int16Codec{}, nil + case reflect.Int32: + if typeName != "int32" { + return encoderOfType(cfg, reflect.TypeOf((*int32)(nil)).Elem()) + } + return &int32Codec{}, nil + case reflect.Int64: + if typeName != "int64" { + return encoderOfType(cfg, reflect.TypeOf((*int64)(nil)).Elem()) + } + return &int64Codec{}, nil + case reflect.Uint: + if typeName != "uint" { + return encoderOfType(cfg, reflect.TypeOf((*uint)(nil)).Elem()) + } + return &uintCodec{}, nil + case reflect.Uint8: + if typeName != "uint8" { + return encoderOfType(cfg, reflect.TypeOf((*uint8)(nil)).Elem()) + } + return &uint8Codec{}, nil + case reflect.Uint16: + if typeName != "uint16" { + return encoderOfType(cfg, reflect.TypeOf((*uint16)(nil)).Elem()) + } + return &uint16Codec{}, nil + case reflect.Uint32: + if typeName != "uint32" { + return encoderOfType(cfg, reflect.TypeOf((*uint32)(nil)).Elem()) + } + return &uint32Codec{}, nil + case reflect.Uintptr: + if typeName != "uintptr" { + return encoderOfType(cfg, reflect.TypeOf((*uintptr)(nil)).Elem()) + } + return &uintptrCodec{}, nil + case reflect.Uint64: + if typeName != "uint64" { + return encoderOfType(cfg, reflect.TypeOf((*uint64)(nil)).Elem()) + } + return &uint64Codec{}, nil + case reflect.Float32: + if typeName != "float32" { + return encoderOfType(cfg, reflect.TypeOf((*float32)(nil)).Elem()) + } + return &float32Codec{}, nil + case reflect.Float64: + if typeName != "float64" { + return encoderOfType(cfg, reflect.TypeOf((*float64)(nil)).Elem()) + } + return &float64Codec{}, nil + case reflect.Bool: + if typeName != "bool" { + return encoderOfType(cfg, reflect.TypeOf((*bool)(nil)).Elem()) + } + return &boolCodec{}, nil + case reflect.Interface: + if typ.NumMethod() == 0 { + return &emptyInterfaceCodec{}, nil + } + return &nonEmptyInterfaceCodec{}, nil + case reflect.Struct: + return prefix(fmt.Sprintf("[%s]", typeName)).addToEncoder(encoderOfStruct(cfg, typ)) + case reflect.Array: + return prefix("[array]").addToEncoder(encoderOfArray(cfg, typ)) + case reflect.Slice: + return prefix("[slice]").addToEncoder(encoderOfSlice(cfg, typ)) + case reflect.Map: + return prefix("[map]").addToEncoder(encoderOfMap(cfg, typ)) + case reflect.Ptr: + return prefix("[optional]").addToEncoder(encoderOfOptional(cfg, typ)) + default: + return nil, fmt.Errorf("unsupported type: %v", typ) + } +} + +func decoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + elemType := typ.Elem() + decoder, err := decoderOfType(cfg, elemType) + if err != nil { + return nil, err + } + return &OptionalDecoder{elemType, decoder}, nil +} + +func encoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + elemType := typ.Elem() + elemEncoder, err := encoderOfType(cfg, 
elemType) + if err != nil { + return nil, err + } + encoder := &OptionalEncoder{elemEncoder} + if elemType.Kind() == reflect.Map { + encoder = &OptionalEncoder{encoder} + } + return encoder, nil +} + +func decoderOfMap(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + decoder, err := decoderOfType(cfg, typ.Elem()) + if err != nil { + return nil, err + } + mapInterface := reflect.New(typ).Interface() + return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}, nil +} + +func extractInterface(val interface{}) emptyInterface { + return *((*emptyInterface)(unsafe.Pointer(&val))) +} + +func encoderOfMap(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + elemType := typ.Elem() + encoder, err := encoderOfType(cfg, elemType) + if err != nil { + return nil, err + } + mapInterface := reflect.New(typ).Elem().Interface() + if cfg.sortMapKeys { + return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil + } + return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_array.go b/vendor/github.com/json-iterator/go/feature_reflect_array.go new file mode 100644 index 000000000..d661fb6fe --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_array.go @@ -0,0 +1,99 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "unsafe" +) + +func decoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + decoder, err := decoderOfType(cfg, typ.Elem()) + if err != nil { + return nil, err + } + return &arrayDecoder{typ, typ.Elem(), decoder}, nil +} + +func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + encoder, err := encoderOfType(cfg, typ.Elem()) + if err != nil { + return nil, err + } + if typ.Elem().Kind() == reflect.Map { + encoder = &OptionalEncoder{encoder} + } + return &arrayEncoder{typ, typ.Elem(), encoder}, nil +} + +type arrayEncoder struct { + arrayType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder +} + +func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteArrayStart() + elemPtr := unsafe.Pointer(ptr) + encoder.elemEncoder.Encode(elemPtr, stream) + for i := 1; i < encoder.arrayType.Len(); i++ { + stream.WriteMore() + elemPtr = unsafe.Pointer(uintptr(elemPtr) + encoder.elemType.Size()) + encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error()) + } +} + +func (encoder *arrayEncoder) EncodeInterface(val interface{}, stream *Stream) { + // special optimization for interface{} + e := (*emptyInterface)(unsafe.Pointer(&val)) + if e.word == nil { + stream.WriteArrayStart() + stream.WriteNil() + stream.WriteArrayEnd() + return + } + elemType := encoder.arrayType.Elem() + if encoder.arrayType.Len() == 1 && (elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map) { + ptr := uintptr(e.word) + e.word = unsafe.Pointer(&ptr) + } + if reflect.TypeOf(val).Kind() == reflect.Ptr { + encoder.Encode(unsafe.Pointer(&e.word), stream) + } else { + encoder.Encode(e.word, stream) + } +} + +func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type arrayDecoder struct { + arrayType reflect.Type + elemType reflect.Type + elemDecoder ValDecoder +} + +func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, 
iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error()) + } +} + +func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + offset := uintptr(0) + iter.ReadArrayCB(func(iter *Iterator) bool { + if offset < decoder.arrayType.Size() { + decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(ptr)+offset), iter) + offset += decoder.elemType.Size() + } else { + iter.Skip() + } + return true + }) +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_extension.go b/vendor/github.com/json-iterator/go/feature_reflect_extension.go new file mode 100644 index 000000000..177df2c81 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_extension.go @@ -0,0 +1,414 @@ +package jsoniter + +import ( + "fmt" + "reflect" + "sort" + "strings" + "unicode" + "unsafe" +) + +var typeDecoders = map[string]ValDecoder{} +var fieldDecoders = map[string]ValDecoder{} +var typeEncoders = map[string]ValEncoder{} +var fieldEncoders = map[string]ValEncoder{} +var extensions = []Extension{} + +// StructDescriptor describe how should we encode/decode the struct +type StructDescriptor struct { + onePtrEmbedded bool + onePtrOptimization bool + Type reflect.Type + Fields []*Binding +} + +// GetField get one field from the descriptor by its name. +// Can not use map here to keep field orders. +func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding { + for _, binding := range structDescriptor.Fields { + if binding.Field.Name == fieldName { + return binding + } + } + return nil +} + +// Binding describe how should we encode/decode the struct field +type Binding struct { + levels []int + Field *reflect.StructField + FromNames []string + ToNames []string + Encoder ValEncoder + Decoder ValDecoder +} + +// Extension the one for all SPI. Customize encoding/decoding by specifying alternate encoder/decoder. +// Can also rename fields by UpdateStructDescriptor. 
+type Extension interface { + UpdateStructDescriptor(structDescriptor *StructDescriptor) + CreateDecoder(typ reflect.Type) ValDecoder + CreateEncoder(typ reflect.Type) ValEncoder + DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder + DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder +} + +// DummyExtension embed this type get dummy implementation for all methods of Extension +type DummyExtension struct { +} + +// UpdateStructDescriptor No-op +func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateDecoder No-op +func (extension *DummyExtension) CreateDecoder(typ reflect.Type) ValDecoder { + return nil +} + +// CreateEncoder No-op +func (extension *DummyExtension) CreateEncoder(typ reflect.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension *DummyExtension) DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension *DummyExtension) DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder { + return encoder +} + +type funcDecoder struct { + fun DecoderFunc +} + +func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.fun(ptr, iter) +} + +type funcEncoder struct { + fun EncoderFunc + isEmptyFunc func(ptr unsafe.Pointer) bool +} + +func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.fun(ptr, stream) +} + +func (encoder *funcEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool { + if encoder.isEmptyFunc == nil { + return false + } + return encoder.isEmptyFunc(ptr) +} + +// DecoderFunc the function form of TypeDecoder +type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator) + +// EncoderFunc the function form of TypeEncoder +type EncoderFunc func(ptr unsafe.Pointer, stream *Stream) + +// RegisterTypeDecoderFunc register TypeDecoder for a type with function +func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) { + typeDecoders[typ] = &funcDecoder{fun} +} + +// RegisterTypeDecoder register TypeDecoder for a typ +func RegisterTypeDecoder(typ string, decoder ValDecoder) { + typeDecoders[typ] = decoder +} + +// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function +func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) { + RegisterFieldDecoder(typ, field, &funcDecoder{fun}) +} + +// RegisterFieldDecoder register TypeDecoder for a struct field +func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) { + fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder +} + +// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function +func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc} +} + +// RegisterTypeEncoder register TypeEncoder for a type +func RegisterTypeEncoder(typ string, encoder ValEncoder) { + typeEncoders[typ] = encoder +} + +// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function +func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc}) +} + +// RegisterFieldEncoder register TypeEncoder for a struct field +func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) { + 
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder +} + +// RegisterExtension register extension +func RegisterExtension(extension Extension) { + extensions = append(extensions, extension) +} + +func getTypeDecoderFromExtension(typ reflect.Type) ValDecoder { + decoder := _getTypeDecoderFromExtension(typ) + if decoder != nil { + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + } + return decoder +} +func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder { + for _, extension := range extensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + typeName := typ.String() + decoder := typeDecoders[typeName] + if decoder != nil { + return decoder + } + if typ.Kind() == reflect.Ptr { + decoder := typeDecoders[typ.Elem().String()] + if decoder != nil { + return &OptionalDecoder{typ.Elem(), decoder} + } + } + return nil +} + +func getTypeEncoderFromExtension(typ reflect.Type) ValEncoder { + encoder := _getTypeEncoderFromExtension(typ) + if encoder != nil { + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + } + return encoder +} + +func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder { + for _, extension := range extensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + typeName := typ.String() + encoder := typeEncoders[typeName] + if encoder != nil { + return encoder + } + if typ.Kind() == reflect.Ptr { + encoder := typeEncoders[typ.Elem().String()] + if encoder != nil { + return &OptionalEncoder{encoder} + } + } + return nil +} + +func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, error) { + embeddedBindings := []*Binding{} + bindings := []*Binding{} + for i := 0; i < typ.NumField(); i++ { + field := typ.Field(i) + tag := field.Tag.Get(cfg.getTagKey()) + tagParts := strings.Split(tag, ",") + if tag == "-" { + continue + } + if field.Anonymous && (tag == "" || tagParts[0] == "") { + if field.Type.Kind() == reflect.Struct { + structDescriptor, err := describeStruct(cfg, field.Type) + if err != nil { + return nil, err + } + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) + omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty} + binding.Decoder = &structFieldDecoder{&field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct { + structDescriptor, err := describeStruct(cfg, field.Type.Elem()) + if err != nil { + return nil, err + } + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) 
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &OptionalEncoder{binding.Encoder} + binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty} + binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder} + binding.Decoder = &structFieldDecoder{&field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } + } + fieldNames := calcFieldNames(field.Name, tagParts[0], tag) + fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name) + decoder := fieldDecoders[fieldCacheKey] + if decoder == nil { + var err error + decoder, err = decoderOfType(cfg, field.Type) + if len(fieldNames) > 0 && err != nil { + return nil, err + } + } + encoder := fieldEncoders[fieldCacheKey] + if encoder == nil { + var err error + encoder, err = encoderOfType(cfg, field.Type) + if len(fieldNames) > 0 && err != nil { + return nil, err + } + // map is stored as pointer in the struct, + // and treat nil or empty map as empty field + if encoder != nil && field.Type.Kind() == reflect.Map { + encoder = &optionalMapEncoder{encoder} + } + } + binding := &Binding{ + Field: &field, + FromNames: fieldNames, + ToNames: fieldNames, + Decoder: decoder, + Encoder: encoder, + } + binding.levels = []int{i} + bindings = append(bindings, binding) + } + return createStructDescriptor(cfg, typ, bindings, embeddedBindings), nil +} +func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor { + onePtrEmbedded := false + onePtrOptimization := false + if typ.NumField() == 1 { + firstField := typ.Field(0) + switch firstField.Type.Kind() { + case reflect.Ptr: + if firstField.Anonymous && firstField.Type.Elem().Kind() == reflect.Struct { + onePtrEmbedded = true + } + fallthrough + case reflect.Map: + onePtrOptimization = true + case reflect.Struct: + onePtrOptimization = isStructOnePtr(firstField.Type) + } + } + structDescriptor := &StructDescriptor{ + onePtrEmbedded: onePtrEmbedded, + onePtrOptimization: onePtrOptimization, + Type: typ, + Fields: bindings, + } + for _, extension := range extensions { + extension.UpdateStructDescriptor(structDescriptor) + } + processTags(structDescriptor, cfg) + // merge normal & embedded bindings & sort with original order + allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...)) + sort.Sort(allBindings) + structDescriptor.Fields = allBindings + return structDescriptor +} + +func isStructOnePtr(typ reflect.Type) bool { + if typ.NumField() == 1 { + firstField := typ.Field(0) + switch firstField.Type.Kind() { + case reflect.Ptr: + return true + case reflect.Map: + return true + case reflect.Struct: + return isStructOnePtr(firstField.Type) + } + } + return false +} + +type sortableBindings []*Binding + +func (bindings sortableBindings) Len() int { + return len(bindings) +} + +func (bindings sortableBindings) Less(i, j int) bool { + left := bindings[i].levels + right := bindings[j].levels + k := 0 + for { + if left[k] < right[k] { + return true + } else if left[k] > right[k] { + return false + } + k++ + } +} + +func (bindings sortableBindings) Swap(i, j int) { + bindings[i], bindings[j] = bindings[j], bindings[i] +} + +func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) { + for _, binding := range structDescriptor.Fields { + shouldOmitEmpty := false + tagParts := strings.Split(binding.Field.Tag.Get(cfg.getTagKey()), ",") + for _, tagPart := range tagParts[1:] { + if tagPart == "omitempty" { + 
shouldOmitEmpty = true + } else if tagPart == "string" { + if binding.Field.Type.Kind() == reflect.String { + binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg} + binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg} + } else { + binding.Decoder = &stringModeNumberDecoder{binding.Decoder} + binding.Encoder = &stringModeNumberEncoder{binding.Encoder} + } + } + } + binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder} + binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty} + } +} + +func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string { + // ignore? + if wholeTag == "-" { + return []string{} + } + // rename? + var fieldNames []string + if tagProvidedFieldName == "" { + fieldNames = []string{originalFieldName} + } else { + fieldNames = []string{tagProvidedFieldName} + } + // private? + isNotExported := unicode.IsLower(rune(originalFieldName[0])) + if isNotExported { + fieldNames = []string{} + } + return fieldNames +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_map.go b/vendor/github.com/json-iterator/go/feature_reflect_map.go new file mode 100644 index 000000000..005671e01 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_map.go @@ -0,0 +1,244 @@ +package jsoniter + +import ( + "encoding" + "encoding/json" + "reflect" + "sort" + "strconv" + "unsafe" +) + +type mapDecoder struct { + mapType reflect.Type + keyType reflect.Type + elemType reflect.Type + elemDecoder ValDecoder + mapInterface emptyInterface +} + +func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + // dark magic to cast unsafe.Pointer back to interface{} using reflect.Type + mapInterface := decoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface).Elem() + if iter.ReadNil() { + realVal.Set(reflect.Zero(decoder.mapType)) + return + } + if realVal.IsNil() { + realVal.Set(reflect.MakeMap(realVal.Type())) + } + iter.ReadMapCB(func(iter *Iterator, keyStr string) bool { + elem := reflect.New(decoder.elemType) + decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter) + // to put into map, we have to use reflection + keyType := decoder.keyType + // TODO: remove this from loop + switch { + case keyType.Kind() == reflect.String: + realVal.SetMapIndex(reflect.ValueOf(keyStr).Convert(keyType), elem.Elem()) + return true + case keyType.Implements(textUnmarshalerType): + textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler) + err := textUnmarshaler.UnmarshalText([]byte(keyStr)) + if err != nil { + iter.ReportError("read map key as TextUnmarshaler", err.Error()) + return false + } + realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem()) + return true + case reflect.PtrTo(keyType).Implements(textUnmarshalerType): + textUnmarshaler := reflect.New(keyType).Interface().(encoding.TextUnmarshaler) + err := textUnmarshaler.UnmarshalText([]byte(keyStr)) + if err != nil { + iter.ReportError("read map key as TextUnmarshaler", err.Error()) + return false + } + realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler).Elem(), elem.Elem()) + return true + default: + switch keyType.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + n, err := strconv.ParseInt(keyStr, 10, 64) + if err != nil || reflect.Zero(keyType).OverflowInt(n) { + iter.ReportError("read map key as int64", "read int64 failed") 
+ return false + } + realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem()) + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + n, err := strconv.ParseUint(keyStr, 10, 64) + if err != nil || reflect.Zero(keyType).OverflowUint(n) { + iter.ReportError("read map key as uint64", "read uint64 failed") + return false + } + realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem()) + return true + } + } + iter.ReportError("read map key", "unexpected map key type "+keyType.String()) + return true + }) +} + +type mapEncoder struct { + mapType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder + mapInterface emptyInterface +} + +func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + stream.WriteObjectStart() + for i, key := range realVal.MapKeys() { + if i != 0 { + stream.WriteMore() + } + encodeMapKey(key, stream) + if stream.indention > 0 { + stream.writeTwoBytes(byte(':'), byte(' ')) + } else { + stream.writeByte(':') + } + val := realVal.MapIndex(key).Interface() + encoder.elemEncoder.EncodeInterface(val, stream) + } + stream.WriteObjectEnd() +} + +func encodeMapKey(key reflect.Value, stream *Stream) { + if key.Kind() == reflect.String { + stream.WriteString(key.String()) + return + } + if tm, ok := key.Interface().(encoding.TextMarshaler); ok { + buf, err := tm.MarshalText() + if err != nil { + stream.Error = err + return + } + stream.writeByte('"') + stream.Write(buf) + stream.writeByte('"') + return + } + switch key.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + stream.writeByte('"') + stream.WriteInt64(key.Int()) + stream.writeByte('"') + return + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + stream.writeByte('"') + stream.WriteUint64(key.Uint()) + stream.writeByte('"') + return + } + stream.Error = &json.UnsupportedTypeError{Type: key.Type()} +} + +func (encoder *mapEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + return realVal.Len() == 0 +} + +type sortKeysMapEncoder struct { + mapType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder + mapInterface emptyInterface +} + +func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + + // Extract and sort the keys. 
+ keys := realVal.MapKeys() + sv := stringValues(make([]reflectWithString, len(keys))) + for i, v := range keys { + sv[i].v = v + if err := sv[i].resolve(); err != nil { + stream.Error = err + return + } + } + sort.Sort(sv) + + stream.WriteObjectStart() + for i, key := range sv { + if i != 0 { + stream.WriteMore() + } + stream.WriteVal(key.s) // might need html escape, so can not WriteString directly + if stream.indention > 0 { + stream.writeTwoBytes(byte(':'), byte(' ')) + } else { + stream.writeByte(':') + } + val := realVal.MapIndex(key.v).Interface() + encoder.elemEncoder.EncodeInterface(val, stream) + } + stream.WriteObjectEnd() +} + +// stringValues is a slice of reflect.Value holding *reflect.StringValue. +// It implements the methods to sort by string. +type stringValues []reflectWithString + +type reflectWithString struct { + v reflect.Value + s string +} + +func (w *reflectWithString) resolve() error { + if w.v.Kind() == reflect.String { + w.s = w.v.String() + return nil + } + if tm, ok := w.v.Interface().(encoding.TextMarshaler); ok { + buf, err := tm.MarshalText() + w.s = string(buf) + return err + } + switch w.v.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + w.s = strconv.FormatInt(w.v.Int(), 10) + return nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + w.s = strconv.FormatUint(w.v.Uint(), 10) + return nil + } + return &json.UnsupportedTypeError{Type: w.v.Type()} +} + +func (sv stringValues) Len() int { return len(sv) } +func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv stringValues) Less(i, j int) bool { return sv[i].s < sv[j].s } + +func (encoder *sortKeysMapEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + return realVal.Len() == 0 +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_native.go b/vendor/github.com/json-iterator/go/feature_reflect_native.go new file mode 100644 index 000000000..95bd1e87c --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_native.go @@ -0,0 +1,764 @@ +package jsoniter + +import ( + "encoding" + "encoding/base64" + "encoding/json" + "reflect" + "unsafe" +) + +type stringCodec struct { +} + +func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*string)(ptr)) = iter.ReadString() +} + +func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteString(str) +} + +func (codec *stringCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +type intCodec struct { +} + +func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int)(ptr)) = iter.ReadInt() + } +} + +func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt(*((*int)(ptr))) +} + +func (codec *intCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *intCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int)(ptr)) == 0 +} + +type uintptrCodec struct { +} + +func (codec *uintptrCodec) Decode(ptr 
unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uintptr)(ptr)) = uintptr(iter.ReadUint64()) + } +} + +func (codec *uintptrCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint64(uint64(*((*uintptr)(ptr)))) +} + +func (codec *uintptrCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uintptrCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uintptr)(ptr)) == 0 +} + +type int8Codec struct { +} + +func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int8)(ptr)) = iter.ReadInt8() + } +} + +func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt8(*((*int8)(ptr))) +} + +func (codec *int8Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int8)(ptr)) == 0 +} + +type int16Codec struct { +} + +func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int16)(ptr)) = iter.ReadInt16() + } +} + +func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt16(*((*int16)(ptr))) +} + +func (codec *int16Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int16)(ptr)) == 0 +} + +type int32Codec struct { +} + +func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int32)(ptr)) = iter.ReadInt32() + } +} + +func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt32(*((*int32)(ptr))) +} + +func (codec *int32Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int32)(ptr)) == 0 +} + +type int64Codec struct { +} + +func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int64)(ptr)) = iter.ReadInt64() + } +} + +func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt64(*((*int64)(ptr))) +} + +func (codec *int64Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int64)(ptr)) == 0 +} + +type uintCodec struct { +} + +func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint)(ptr)) = iter.ReadUint() + return + } +} + +func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint(*((*uint)(ptr))) +} + +func (codec *uintCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uintCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint)(ptr)) == 0 +} + +type uint8Codec struct { +} + +func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint8)(ptr)) = iter.ReadUint8() + } +} + +func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint8(*((*uint8)(ptr))) +} + +func (codec *uint8Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint8)(ptr)) == 0 +} + +type uint16Codec struct { +} + +func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter 
*Iterator) { + if !iter.ReadNil() { + *((*uint16)(ptr)) = iter.ReadUint16() + } +} + +func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint16(*((*uint16)(ptr))) +} + +func (codec *uint16Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint16)(ptr)) == 0 +} + +type uint32Codec struct { +} + +func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint32)(ptr)) = iter.ReadUint32() + } +} + +func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint32(*((*uint32)(ptr))) +} + +func (codec *uint32Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint32)(ptr)) == 0 +} + +type uint64Codec struct { +} + +func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint64)(ptr)) = iter.ReadUint64() + } +} + +func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint64(*((*uint64)(ptr))) +} + +func (codec *uint64Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint64)(ptr)) == 0 +} + +type float32Codec struct { +} + +func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float32)(ptr)) = iter.ReadFloat32() + } +} + +func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32(*((*float32)(ptr))) +} + +func (codec *float32Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type float64Codec struct { +} + +func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float64)(ptr)) = iter.ReadFloat64() + } +} + +func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64(*((*float64)(ptr))) +} + +func (codec *float64Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +type boolCodec struct { +} + +func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*bool)(ptr)) = iter.ReadBool() + } +} + +func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteBool(*((*bool)(ptr))) +} + +func (codec *boolCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool { + return !(*((*bool)(ptr))) +} + +type emptyInterfaceCodec struct { +} + +func (codec *emptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + existing := *((*interface{})(ptr)) + + // Checking for both typed and untyped nil pointers. 
+ if existing != nil && + reflect.TypeOf(existing).Kind() == reflect.Ptr && + !reflect.ValueOf(existing).IsNil() { + + var ptrToExisting interface{} + for { + elem := reflect.ValueOf(existing).Elem() + if elem.Kind() != reflect.Ptr || elem.IsNil() { + break + } + ptrToExisting = existing + existing = elem.Interface() + } + + if iter.ReadNil() { + if ptrToExisting != nil { + nilPtr := reflect.Zero(reflect.TypeOf(ptrToExisting).Elem()) + reflect.ValueOf(ptrToExisting).Elem().Set(nilPtr) + } else { + *((*interface{})(ptr)) = nil + } + } else { + iter.ReadVal(existing) + } + + return + } + + if iter.ReadNil() { + *((*interface{})(ptr)) = nil + } else { + *((*interface{})(ptr)) = iter.Read() + } +} + +func (codec *emptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteVal(*((*interface{})(ptr))) +} + +func (codec *emptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteVal(val) +} + +func (codec *emptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool { + emptyInterface := (*emptyInterface)(ptr) + return emptyInterface.typ == nil +} + +type nonEmptyInterfaceCodec struct { +} + +func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + nonEmptyInterface := (*nonEmptyInterface)(ptr) + if nonEmptyInterface.itab == nil { + iter.ReportError("read non-empty interface", "do not know which concrete type to decode to") + return + } + var i interface{} + e := (*emptyInterface)(unsafe.Pointer(&i)) + e.typ = nonEmptyInterface.itab.typ + e.word = nonEmptyInterface.word + iter.ReadVal(&i) + if e.word == nil { + nonEmptyInterface.itab = nil + } + nonEmptyInterface.word = e.word +} + +func (codec *nonEmptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + nonEmptyInterface := (*nonEmptyInterface)(ptr) + var i interface{} + if nonEmptyInterface.itab != nil { + e := (*emptyInterface)(unsafe.Pointer(&i)) + e.typ = nonEmptyInterface.itab.typ + e.word = nonEmptyInterface.word + } + stream.WriteVal(i) +} + +func (codec *nonEmptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteVal(val) +} + +func (codec *nonEmptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool { + nonEmptyInterface := (*nonEmptyInterface)(ptr) + return nonEmptyInterface.word == nil +} + +type anyCodec struct { +} + +func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*Any)(ptr)) = iter.ReadAny() +} + +func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + (*((*Any)(ptr))).WriteTo(stream) +} + +func (codec *anyCodec) EncodeInterface(val interface{}, stream *Stream) { + (val.(Any)).WriteTo(stream) +} + +func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool { + return (*((*Any)(ptr))).Size() == 0 +} + +type jsonNumberCodec struct { +} + +func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*json.Number)(ptr)) = json.Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*json.Number)(ptr)) = "" + default: + *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*json.Number)(ptr)))) +} + +func (codec *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(json.Number))) +} + +func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.Number)(ptr))) == 0 +} + +type jsoniterNumberCodec struct 
{ +} + +func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*Number)(ptr)) = Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*Number)(ptr)) = "" + default: + *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*Number)(ptr)))) +} + +func (codec *jsoniterNumberCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(Number))) +} + +func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*Number)(ptr))) == 0 +} + +type jsonRawMessageCodec struct { +} + +func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes()) +} + +func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*json.RawMessage)(ptr)))) +} + +func (codec *jsonRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(json.RawMessage))) +} + +func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.RawMessage)(ptr))) == 0 +} + +type jsoniterRawMessageCodec struct { +} + +func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes()) +} + +func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*RawMessage)(ptr)))) +} + +func (codec *jsoniterRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(RawMessage))) +} + +func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*RawMessage)(ptr))) == 0 +} + +type base64Codec struct { + sliceDecoder ValDecoder +} + +func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + ptrSlice := (*sliceHeader)(ptr) + ptrSlice.Len = 0 + ptrSlice.Cap = 0 + ptrSlice.Data = nil + return + } + switch iter.WhatIsNext() { + case StringValue: + encoding := base64.StdEncoding + src := iter.SkipAndReturnBytes() + src = src[1 : len(src)-1] + decodedLen := encoding.DecodedLen(len(src)) + dst := make([]byte, decodedLen) + len, err := encoding.Decode(dst, src) + if err != nil { + iter.ReportError("decode base64", err.Error()) + } else { + dst = dst[:len] + dstSlice := (*sliceHeader)(unsafe.Pointer(&dst)) + ptrSlice := (*sliceHeader)(ptr) + ptrSlice.Data = dstSlice.Data + ptrSlice.Cap = dstSlice.Cap + ptrSlice.Len = dstSlice.Len + } + case ArrayValue: + codec.sliceDecoder.Decode(ptr, iter) + default: + iter.ReportError("base64Codec", "invalid input") + } +} + +func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + src := *((*[]byte)(ptr)) + if len(src) == 0 { + stream.WriteNil() + return + } + encoding := base64.StdEncoding + stream.writeByte('"') + toGrow := encoding.EncodedLen(len(src)) + stream.ensure(toGrow) + encoding.Encode(stream.buf[stream.n:], src) + stream.n += toGrow + stream.writeByte('"') +} + +func (codec *base64Codec) EncodeInterface(val interface{}, stream *Stream) { + ptr := extractInterface(val).word + src := *((*[]byte)(ptr)) + if len(src) == 0 { + stream.WriteNil() + return + } + encoding := base64.StdEncoding + stream.writeByte('"') + toGrow := encoding.EncodedLen(len(src)) + stream.ensure(toGrow) + 
encoding.Encode(stream.buf[stream.n:], src) + stream.n += toGrow + stream.writeByte('"') +} + +func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*[]byte)(ptr))) == 0 +} + +type stringModeNumberDecoder struct { + elemDecoder ValDecoder +} + +func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } + decoder.elemDecoder.Decode(ptr, iter) + if iter.Error != nil { + return + } + c = iter.readByte() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } +} + +type stringModeStringDecoder struct { + elemDecoder ValDecoder + cfg *frozenConfig +} + +func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.elemDecoder.Decode(ptr, iter) + str := *((*string)(ptr)) + tempIter := decoder.cfg.BorrowIterator([]byte(str)) + defer decoder.cfg.ReturnIterator(tempIter) + *((*string)(ptr)) = tempIter.ReadString() +} + +type stringModeNumberEncoder struct { + elemEncoder ValEncoder +} + +func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.writeByte('"') + encoder.elemEncoder.Encode(ptr, stream) + stream.writeByte('"') +} + +func (encoder *stringModeNumberEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} + +type stringModeStringEncoder struct { + elemEncoder ValEncoder + cfg *frozenConfig +} + +func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + tempStream := encoder.cfg.BorrowStream(nil) + defer encoder.cfg.ReturnStream(tempStream) + encoder.elemEncoder.Encode(ptr, tempStream) + stream.WriteString(string(tempStream.Buffer())) +} + +func (encoder *stringModeStringEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} + +type marshalerEncoder struct { + templateInterface emptyInterface + checkIsEmpty checkIsEmpty +} + +func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + templateInterface := encoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + marshaler, ok := (*realInterface).(json.Marshaler) + if !ok { + stream.WriteVal(nil) + return + } + + bytes, err := marshaler.MarshalJSON() + if err != nil { + stream.Error = err + } else { + stream.Write(bytes) + } +} +func (encoder *marshalerEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type textMarshalerEncoder struct { + templateInterface emptyInterface + checkIsEmpty checkIsEmpty +} + +func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + templateInterface := encoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + marshaler := (*realInterface).(encoding.TextMarshaler) + bytes, err := marshaler.MarshalText() + if err != nil { + stream.Error = err + } else { + stream.WriteString(string(bytes)) 
+ } +} + +func (encoder *textMarshalerEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type unmarshalerDecoder struct { + templateInterface emptyInterface +} + +func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + templateInterface := decoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + unmarshaler := (*realInterface).(json.Unmarshaler) + iter.nextToken() + iter.unreadByte() // skip spaces + bytes := iter.SkipAndReturnBytes() + err := unmarshaler.UnmarshalJSON(bytes) + if err != nil { + iter.ReportError("unmarshalerDecoder", err.Error()) + } +} + +type textUnmarshalerDecoder struct { + templateInterface emptyInterface +} + +func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + templateInterface := decoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + unmarshaler := (*realInterface).(encoding.TextUnmarshaler) + str := iter.ReadString() + err := unmarshaler.UnmarshalText([]byte(str)) + if err != nil { + iter.ReportError("textUnmarshalerDecoder", err.Error()) + } +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_object.go b/vendor/github.com/json-iterator/go/feature_reflect_object.go new file mode 100644 index 000000000..59b1235c0 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_object.go @@ -0,0 +1,196 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "strings" + "unsafe" +) + +func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + type bindingTo struct { + binding *Binding + toName string + ignored bool + } + orderedBindings := []*bindingTo{} + structDescriptor, err := describeStruct(cfg, typ) + if err != nil { + return nil, err + } + for _, binding := range structDescriptor.Fields { + for _, toName := range binding.ToNames { + new := &bindingTo{ + binding: binding, + toName: toName, + } + for _, old := range orderedBindings { + if old.toName != toName { + continue + } + old.ignored, new.ignored = resolveConflictBinding(cfg, old.binding, new.binding) + } + orderedBindings = append(orderedBindings, new) + } + } + if len(orderedBindings) == 0 { + return &emptyStructEncoder{}, nil + } + finalOrderedFields := []structFieldTo{} + for _, bindingTo := range orderedBindings { + if !bindingTo.ignored { + finalOrderedFields = append(finalOrderedFields, structFieldTo{ + encoder: bindingTo.binding.Encoder.(*structFieldEncoder), + toName: bindingTo.toName, + }) + } + } + return &structEncoder{structDescriptor.onePtrEmbedded, structDescriptor.onePtrOptimization, finalOrderedFields}, nil +} + +func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) { + newTagged := new.Field.Tag.Get(cfg.getTagKey()) != "" + oldTagged := old.Field.Tag.Get(cfg.getTagKey()) != "" + if newTagged { + if oldTagged { + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } else { + return true, false + } + } else { + if oldTagged { + return true, false + } + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } 
+} + +func decoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + bindings := map[string]*Binding{} + structDescriptor, err := describeStruct(cfg, typ) + if err != nil { + return nil, err + } + for _, binding := range structDescriptor.Fields { + for _, fromName := range binding.FromNames { + old := bindings[fromName] + if old == nil { + bindings[fromName] = binding + continue + } + ignoreOld, ignoreNew := resolveConflictBinding(cfg, old, binding) + if ignoreOld { + delete(bindings, fromName) + } + if !ignoreNew { + bindings[fromName] = binding + } + } + } + fields := map[string]*structFieldDecoder{} + for k, binding := range bindings { + fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder) + } + return createStructDecoder(typ, fields) +} + +type structFieldEncoder struct { + field *reflect.StructField + fieldEncoder ValEncoder + omitempty bool +} + +func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + fieldPtr := unsafe.Pointer(uintptr(ptr) + encoder.field.Offset) + encoder.fieldEncoder.Encode(fieldPtr, stream) + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%s: %s", encoder.field.Name, stream.Error.Error()) + } +} + +func (encoder *structFieldEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool { + fieldPtr := unsafe.Pointer(uintptr(ptr) + encoder.field.Offset) + return encoder.fieldEncoder.IsEmpty(fieldPtr) +} + +type structEncoder struct { + onePtrEmbedded bool + onePtrOptimization bool + fields []structFieldTo +} + +type structFieldTo struct { + encoder *structFieldEncoder + toName string +} + +func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteObjectStart() + isNotFirst := false + for _, field := range encoder.fields { + if field.encoder.omitempty && field.encoder.IsEmpty(ptr) { + continue + } + if isNotFirst { + stream.WriteMore() + } + stream.WriteObjectField(field.toName) + field.encoder.Encode(ptr, stream) + isNotFirst = true + } + stream.WriteObjectEnd() +} + +func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) { + e := (*emptyInterface)(unsafe.Pointer(&val)) + if encoder.onePtrOptimization { + if e.word == nil && encoder.onePtrEmbedded { + stream.WriteObjectStart() + stream.WriteObjectEnd() + return + } + ptr := uintptr(e.word) + e.word = unsafe.Pointer(&ptr) + } + if reflect.TypeOf(val).Kind() == reflect.Ptr { + encoder.Encode(unsafe.Pointer(&e.word), stream) + } else { + encoder.Encode(e.word, stream) + } +} + +func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type emptyStructEncoder struct { +} + +func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteEmptyObject() +} + +func (encoder *emptyStructEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_slice.go b/vendor/github.com/json-iterator/go/feature_reflect_slice.go new file mode 100644 index 000000000..51a8daecf --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_slice.go @@ -0,0 +1,147 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "unsafe" +) + +func decoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { + decoder, err 
:= decoderOfType(cfg, typ.Elem()) + if err != nil { + return nil, err + } + return &sliceDecoder{typ, typ.Elem(), decoder}, nil +} + +func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { + encoder, err := encoderOfType(cfg, typ.Elem()) + if err != nil { + return nil, err + } + if typ.Elem().Kind() == reflect.Map { + encoder = &OptionalEncoder{encoder} + } + return &sliceEncoder{typ, typ.Elem(), encoder}, nil +} + +type sliceEncoder struct { + sliceType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder +} + +func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + slice := (*sliceHeader)(ptr) + if slice.Data == nil { + stream.WriteNil() + return + } + if slice.Len == 0 { + stream.WriteEmptyArray() + return + } + stream.WriteArrayStart() + elemPtr := unsafe.Pointer(slice.Data) + encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream) + for i := 1; i < slice.Len; i++ { + stream.WriteMore() + elemPtr = unsafe.Pointer(uintptr(elemPtr) + encoder.elemType.Size()) + encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error()) + } +} + +func (encoder *sliceEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + slice := (*sliceHeader)(ptr) + return slice.Len == 0 +} + +type sliceDecoder struct { + sliceType reflect.Type + elemType reflect.Type + elemDecoder ValDecoder +} + +// sliceHeader is a safe version of SliceHeader used within this package. +type sliceHeader struct { + Data unsafe.Pointer + Len int + Cap int +} + +func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error()) + } +} + +func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + slice := (*sliceHeader)(ptr) + if iter.ReadNil() { + slice.Len = 0 + slice.Cap = 0 + slice.Data = nil + return + } + reuseSlice(slice, decoder.sliceType, 4) + slice.Len = 0 + offset := uintptr(0) + iter.ReadArrayCB(func(iter *Iterator) bool { + growOne(slice, decoder.sliceType, decoder.elemType) + decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter) + offset += decoder.elemType.Size() + return true + }) +} + +// grow grows the slice s so that it can hold extra more values, allocating +// more capacity if needed. It also returns the old and new slice lengths. 
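+// Here, growOne extends the slice by exactly one element and returns nothing: when capacity is exhausted it doubles the capacity while the length is under 1024 (growing by 25% beyond that) and copies the data into a newly allocated backing array.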
+func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) { + newLen := slice.Len + 1 + if newLen <= slice.Cap { + slice.Len = newLen + return + } + newCap := slice.Cap + if newCap == 0 { + newCap = 1 + } else { + for newCap < newLen { + if slice.Len < 1024 { + newCap += newCap + } else { + newCap += newCap / 4 + } + } + } + newVal := reflect.MakeSlice(sliceType, newLen, newCap) + dst := unsafe.Pointer(newVal.Pointer()) + // copy old array into new array + originalBytesCount := slice.Len * int(elementType.Size()) + srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount}) + dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount}) + copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader)) + slice.Data = dst + slice.Len = newLen + slice.Cap = newCap +} + +func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) { + if expectedCap <= slice.Cap { + return + } + newVal := reflect.MakeSlice(sliceType, 0, expectedCap) + dst := unsafe.Pointer(newVal.Pointer()) + slice.Data = dst + slice.Cap = expectedCap +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go new file mode 100644 index 000000000..e6ced77c2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go @@ -0,0 +1,934 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "strings" + "unsafe" +) + +func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (ValDecoder, error) { + knownHash := map[int32]struct{}{ + 0: {}, + } + switch len(fields) { + case 0: + return &skipObjectDecoder{typ}, nil + case 1: + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}, nil + } + case 2: + var fieldHash1 int32 + var fieldHash2 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldHash1 == 0 { + fieldHash1 = fieldHash + fieldDecoder1 = fieldDecoder + } else { + fieldHash2 = fieldHash + fieldDecoder2 = fieldDecoder + } + } + return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}, nil + case 3: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } + } + return &threeFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil + case 4: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 
int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } + } + return &fourFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4}, nil + case 5: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } + } + return &fiveFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil + case 6: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } + } + return &sixFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil + case 7: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 
*structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } + } + return &sevenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7}, nil + case 8: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldName8 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } + } + return &eightFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil + case 9: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldName8 int32 + var fieldName9 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + for fieldName, fieldDecoder := range 
fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } + } + return &nineFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil + case 10: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldName8 int32 + var fieldName9 int32 + var fieldName10 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + var fieldDecoder10 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields}, nil + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else if fieldName9 == 0 { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } else { + fieldName10 = fieldHash + fieldDecoder10 = fieldDecoder + } + } + return &tenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9, + fieldName10, fieldDecoder10}, nil + } + return &generalStructDecoder{typ, fields}, nil +} + +type generalStructDecoder struct { + typ reflect.Type + fields map[string]*structFieldDecoder +} + +func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + var 
fieldBytes []byte + var field string + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes = iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + fieldDecoder := decoder.fields[strings.ToLower(field)] + if fieldDecoder == nil { + iter.Skip() + } else { + fieldDecoder.Decode(ptr, iter) + } + for iter.nextToken() == ',' { + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes := iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + fieldDecoder = decoder.fields[strings.ToLower(field)] + if fieldDecoder == nil { + iter.Skip() + } else { + fieldDecoder.Decode(ptr, iter) + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type skipObjectDecoder struct { + typ reflect.Type +} + +func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valueType := iter.WhatIsNext() + if valueType != ObjectValue && valueType != NilValue { + iter.ReportError("skipObjectDecoder", "expect object or null") + return + } + iter.Skip() +} + +type oneFieldStructDecoder struct { + typ reflect.Type + fieldHash int32 + fieldDecoder *structFieldDecoder +} + +func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + if iter.readFieldHash() == decoder.fieldHash { + decoder.fieldDecoder.Decode(ptr, iter) + } else { + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type twoFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder +} + +func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type threeFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder +} + +func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type fourFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 
int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder +} + +func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type fiveFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder +} + +func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type sixFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder +} + +func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type sevenFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder +} + +func (decoder 
*sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type eightFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder + fieldHash8 int32 + fieldDecoder8 *structFieldDecoder +} + +func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type nineFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder + fieldHash8 int32 + fieldDecoder8 *structFieldDecoder + fieldHash9 int32 + fieldDecoder9 *structFieldDecoder +} + +func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case 
decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type tenFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder + fieldHash8 int32 + fieldDecoder8 *structFieldDecoder + fieldHash9 int32 + fieldDecoder9 *structFieldDecoder + fieldHash10 int32 + fieldDecoder10 *structFieldDecoder +} + +func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + case decoder.fieldHash10: + decoder.fieldDecoder10.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error()) + } +} + +type structFieldDecoder struct { + field *reflect.StructField + fieldDecoder ValDecoder +} + +func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + fieldPtr := unsafe.Pointer(uintptr(ptr) + decoder.field.Offset) + decoder.fieldDecoder.Decode(fieldPtr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%s: %s", decoder.field.Name, iter.Error.Error()) + } +} diff --git a/vendor/github.com/json-iterator/go/feature_stream.go b/vendor/github.com/json-iterator/go/feature_stream.go new file mode 100644 index 000000000..97355eb5b --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream.go @@ -0,0 +1,308 @@ +package jsoniter + +import ( + "io" +) + +// stream is a io.Writer like object, with JSON specific write functions. +// Error is not returned as return value, but stored as Error member on this stream instance. +type Stream struct { + cfg *frozenConfig + out io.Writer + buf []byte + n int + Error error + indention int + Attachment interface{} // open for customized encoder +} + +// NewStream create new stream instance. +// cfg can be jsoniter.ConfigDefault. +// out can be nil if write to internal buffer. +// bufSize is the initial size for the internal buffer in bytes. 
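+// Illustrative use (placeholder value): stream := NewStream(ConfigDefault, nil, 512); stream.WriteVal(value); out := stream.Buffer()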
+func NewStream(cfg API, out io.Writer, bufSize int) *Stream { + return &Stream{ + cfg: cfg.(*frozenConfig), + out: out, + buf: make([]byte, bufSize), + n: 0, + Error: nil, + indention: 0, + } +} + +// Pool returns a pool can provide more stream with same configuration +func (stream *Stream) Pool() StreamPool { + return stream.cfg +} + +// Reset reuse this stream instance by assign a new writer +func (stream *Stream) Reset(out io.Writer) { + stream.out = out + stream.n = 0 +} + +// Available returns how many bytes are unused in the buffer. +func (stream *Stream) Available() int { + return len(stream.buf) - stream.n +} + +// Buffered returns the number of bytes that have been written into the current buffer. +func (stream *Stream) Buffered() int { + return stream.n +} + +// Buffer if writer is nil, use this method to take the result +func (stream *Stream) Buffer() []byte { + return stream.buf[:stream.n] +} + +// Write writes the contents of p into the buffer. +// It returns the number of bytes written. +// If nn < len(p), it also returns an error explaining +// why the write is short. +func (stream *Stream) Write(p []byte) (nn int, err error) { + for len(p) > stream.Available() && stream.Error == nil { + if stream.out == nil { + stream.growAtLeast(len(p)) + } else { + var n int + if stream.Buffered() == 0 { + // Large write, empty buffer. + // Write directly from p to avoid copy. + n, stream.Error = stream.out.Write(p) + } else { + n = copy(stream.buf[stream.n:], p) + stream.n += n + stream.Flush() + } + nn += n + p = p[n:] + } + } + if stream.Error != nil { + return nn, stream.Error + } + n := copy(stream.buf[stream.n:], p) + stream.n += n + nn += n + return nn, nil +} + +// WriteByte writes a single byte. +func (stream *Stream) writeByte(c byte) { + if stream.Error != nil { + return + } + if stream.Available() < 1 { + stream.growAtLeast(1) + } + stream.buf[stream.n] = c + stream.n++ +} + +func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 2 { + stream.growAtLeast(2) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.n += 2 +} + +func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 3 { + stream.growAtLeast(3) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.buf[stream.n+2] = c3 + stream.n += 3 +} + +func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 4 { + stream.growAtLeast(4) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.buf[stream.n+2] = c3 + stream.buf[stream.n+3] = c4 + stream.n += 4 +} + +func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 5 { + stream.growAtLeast(5) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.buf[stream.n+2] = c3 + stream.buf[stream.n+3] = c4 + stream.buf[stream.n+4] = c5 + stream.n += 5 +} + +// Flush writes any buffered data to the underlying io.Writer. 
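+// It is a no-op when out is nil or the buffer is empty; a short write is reported as io.ErrShortWrite, and any write error is also stored in stream.Error.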
+func (stream *Stream) Flush() error { + if stream.out == nil { + return nil + } + if stream.Error != nil { + return stream.Error + } + if stream.n == 0 { + return nil + } + n, err := stream.out.Write(stream.buf[0:stream.n]) + if n < stream.n && err == nil { + err = io.ErrShortWrite + } + if err != nil { + if n > 0 && n < stream.n { + copy(stream.buf[0:stream.n-n], stream.buf[n:stream.n]) + } + stream.n -= n + stream.Error = err + return err + } + stream.n = 0 + return nil +} + +func (stream *Stream) ensure(minimal int) { + available := stream.Available() + if available < minimal { + stream.growAtLeast(minimal) + } +} + +func (stream *Stream) growAtLeast(minimal int) { + if stream.out != nil { + stream.Flush() + if stream.Available() >= minimal { + return + } + } + toGrow := len(stream.buf) + if toGrow < minimal { + toGrow = minimal + } + newBuf := make([]byte, len(stream.buf)+toGrow) + copy(newBuf, stream.Buffer()) + stream.buf = newBuf +} + +// WriteRaw write string out without quotes, just like []byte +func (stream *Stream) WriteRaw(s string) { + stream.ensure(len(s)) + if stream.Error != nil { + return + } + n := copy(stream.buf[stream.n:], s) + stream.n += n +} + +// WriteNil write null to stream +func (stream *Stream) WriteNil() { + stream.writeFourBytes('n', 'u', 'l', 'l') +} + +// WriteTrue write true to stream +func (stream *Stream) WriteTrue() { + stream.writeFourBytes('t', 'r', 'u', 'e') +} + +// WriteFalse write false to stream +func (stream *Stream) WriteFalse() { + stream.writeFiveBytes('f', 'a', 'l', 's', 'e') +} + +// WriteBool write true or false into stream +func (stream *Stream) WriteBool(val bool) { + if val { + stream.WriteTrue() + } else { + stream.WriteFalse() + } +} + +// WriteObjectStart write { with possible indention +func (stream *Stream) WriteObjectStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('{') + stream.writeIndention(0) +} + +// WriteObjectField write "field": with possible indention +func (stream *Stream) WriteObjectField(field string) { + stream.WriteString(field) + if stream.indention > 0 { + stream.writeTwoBytes(':', ' ') + } else { + stream.writeByte(':') + } +} + +// WriteObjectEnd write } with possible indention +func (stream *Stream) WriteObjectEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte('}') +} + +// WriteEmptyObject write {} +func (stream *Stream) WriteEmptyObject() { + stream.writeByte('{') + stream.writeByte('}') +} + +// WriteMore write , with possible indention +func (stream *Stream) WriteMore() { + stream.writeByte(',') + stream.writeIndention(0) +} + +// WriteArrayStart write [ with possible indention +func (stream *Stream) WriteArrayStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('[') + stream.writeIndention(0) +} + +// WriteEmptyArray write [] +func (stream *Stream) WriteEmptyArray() { + stream.writeTwoBytes('[', ']') +} + +// WriteArrayEnd write ] with possible indention +func (stream *Stream) WriteArrayEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte(']') +} + +func (stream *Stream) writeIndention(delta int) { + if stream.indention == 0 { + return + } + stream.writeByte('\n') + toWrite := stream.indention - delta + stream.ensure(toWrite) + for i := 0; i < toWrite && stream.n < len(stream.buf); i++ { + stream.buf[stream.n] = ' ' + stream.n++ + } +} diff --git a/vendor/github.com/json-iterator/go/feature_stream_float.go 
b/vendor/github.com/json-iterator/go/feature_stream_float.go new file mode 100644 index 000000000..9a404e11d --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream_float.go @@ -0,0 +1,96 @@ +package jsoniter + +import ( + "math" + "strconv" +) + +var pow10 []uint64 + +func init() { + pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000} +} + +// WriteFloat32 write float32 to stream +func (stream *Stream) WriteFloat32(val float32) { + abs := math.Abs(float64(val)) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + if float32(abs) < 1e-6 || float32(abs) >= 1e21 { + fmt = 'e' + } + } + stream.WriteRaw(strconv.FormatFloat(float64(val), fmt, -1, 32)) +} + +// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat32Lossy(val float32) { + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat32(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(float64(val)*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + stream.ensure(10) + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[stream.n-1] == '0' { + stream.n-- + } +} + +// WriteFloat64 write float64 to stream +func (stream *Stream) WriteFloat64(val float64) { + abs := math.Abs(val) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + if abs < 1e-6 || abs >= 1e21 { + fmt = 'e' + } + } + stream.WriteRaw(strconv.FormatFloat(float64(val), fmt, -1, 64)) +} + +// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat64Lossy(val float64) { + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat64(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(val*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + stream.ensure(10) + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[stream.n-1] == '0' { + stream.n-- + } +} diff --git a/vendor/github.com/json-iterator/go/feature_stream_int.go b/vendor/github.com/json-iterator/go/feature_stream_int.go new file mode 100644 index 000000000..7cfd522c1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream_int.go @@ -0,0 +1,320 @@ +package jsoniter + +var digits []uint32 + +func init() { + digits = make([]uint32, 1000) + for i := uint32(0); i < 1000; i++ { + digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0' + if i < 10 { + digits[i] += 2 << 24 + } else if i < 100 { + digits[i] += 1 << 24 + } + } +} + +func writeFirstBuf(buf []byte, v uint32, n int) int { + start := v >> 24 + if start == 0 { + buf[n] = byte(v >> 16) + n++ + buf[n] = byte(v >> 8) + n++ + } else if start == 1 { + buf[n] = byte(v >> 8) + n++ + } + buf[n] = byte(v) + n++ + return n +} + +func writeBuf(buf []byte, v uint32, n int) { + buf[n] = byte(v >> 16) + buf[n+1] = byte(v >> 8) + buf[n+2] = byte(v) +} + +// WriteUint8 write uint8 to stream +func (stream *Stream) WriteUint8(val uint8) { + stream.ensure(3) + stream.n = 
writeFirstBuf(stream.buf, digits[val], stream.n) +} + +// WriteInt8 write int8 to stream +func (stream *Stream) WriteInt8(nval int8) { + stream.ensure(4) + n := stream.n + var val uint8 + if nval < 0 { + val = uint8(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint8(nval) + } + stream.n = writeFirstBuf(stream.buf, digits[val], n) +} + +// WriteUint16 write uint16 to stream +func (stream *Stream) WriteUint16(val uint16) { + stream.ensure(5) + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], stream.n) + return + } + r1 := val - q1*1000 + n := writeFirstBuf(stream.buf, digits[q1], stream.n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return +} + +// WriteInt16 write int16 to stream +func (stream *Stream) WriteInt16(nval int16) { + stream.ensure(6) + n := stream.n + var val uint16 + if nval < 0 { + val = uint16(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint16(nval) + } + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + n = writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return +} + +// WriteUint32 write uint32 to stream +func (stream *Stream) WriteUint32(val uint32) { + stream.ensure(10) + n := stream.n + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + } else { + r3 := q2 - q3*1000 + stream.buf[n] = byte(q3 + '0') + n++ + writeBuf(stream.buf, digits[r3], n) + n += 3 + } + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 +} + +// WriteInt32 write int32 to stream +func (stream *Stream) WriteInt32(nval int32) { + stream.ensure(11) + n := stream.n + var val uint32 + if nval < 0 { + val = uint32(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint32(nval) + } + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + } else { + r3 := q2 - q3*1000 + stream.buf[n] = byte(q3 + '0') + n++ + writeBuf(stream.buf, digits[r3], n) + n += 3 + } + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 +} + +// WriteUint64 write uint64 to stream +func (stream *Stream) WriteUint64(val uint64) { + stream.ensure(20) + n := stream.n + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 + return + } + r3 := q2 - q3*1000 + q4 := q3 / 1000 + if q4 == 0 { + n = writeFirstBuf(stream.buf, digits[q3], n) + writeBuf(stream.buf, digits[r3], n) + writeBuf(stream.buf, digits[r2], n+3) + 
writeBuf(stream.buf, digits[r1], n+6) + stream.n = n + 9 + return + } + r4 := q3 - q4*1000 + q5 := q4 / 1000 + if q5 == 0 { + n = writeFirstBuf(stream.buf, digits[q4], n) + writeBuf(stream.buf, digits[r4], n) + writeBuf(stream.buf, digits[r3], n+3) + writeBuf(stream.buf, digits[r2], n+6) + writeBuf(stream.buf, digits[r1], n+9) + stream.n = n + 12 + return + } + r5 := q4 - q5*1000 + q6 := q5 / 1000 + if q6 == 0 { + n = writeFirstBuf(stream.buf, digits[q5], n) + } else { + n = writeFirstBuf(stream.buf, digits[q6], n) + r6 := q5 - q6*1000 + writeBuf(stream.buf, digits[r6], n) + n += 3 + } + writeBuf(stream.buf, digits[r5], n) + writeBuf(stream.buf, digits[r4], n+3) + writeBuf(stream.buf, digits[r3], n+6) + writeBuf(stream.buf, digits[r2], n+9) + writeBuf(stream.buf, digits[r1], n+12) + stream.n = n + 15 +} + +// WriteInt64 write int64 to stream +func (stream *Stream) WriteInt64(nval int64) { + stream.ensure(20) + n := stream.n + var val uint64 + if nval < 0 { + val = uint64(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint64(nval) + } + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 + return + } + r3 := q2 - q3*1000 + q4 := q3 / 1000 + if q4 == 0 { + n = writeFirstBuf(stream.buf, digits[q3], n) + writeBuf(stream.buf, digits[r3], n) + writeBuf(stream.buf, digits[r2], n+3) + writeBuf(stream.buf, digits[r1], n+6) + stream.n = n + 9 + return + } + r4 := q3 - q4*1000 + q5 := q4 / 1000 + if q5 == 0 { + n = writeFirstBuf(stream.buf, digits[q4], n) + writeBuf(stream.buf, digits[r4], n) + writeBuf(stream.buf, digits[r3], n+3) + writeBuf(stream.buf, digits[r2], n+6) + writeBuf(stream.buf, digits[r1], n+9) + stream.n = n + 12 + return + } + r5 := q4 - q5*1000 + q6 := q5 / 1000 + if q6 == 0 { + n = writeFirstBuf(stream.buf, digits[q5], n) + } else { + stream.buf[n] = byte(q6 + '0') + n++ + r6 := q5 - q6*1000 + writeBuf(stream.buf, digits[r6], n) + n += 3 + } + writeBuf(stream.buf, digits[r5], n) + writeBuf(stream.buf, digits[r4], n+3) + writeBuf(stream.buf, digits[r3], n+6) + writeBuf(stream.buf, digits[r2], n+9) + writeBuf(stream.buf, digits[r1], n+12) + stream.n = n + 15 +} + +// WriteInt write int to stream +func (stream *Stream) WriteInt(val int) { + stream.WriteInt64(int64(val)) +} + +// WriteUint write uint to stream +func (stream *Stream) WriteUint(val uint) { + stream.WriteUint64(uint64(val)) +} diff --git a/vendor/github.com/json-iterator/go/feature_stream_string.go b/vendor/github.com/json-iterator/go/feature_stream_string.go new file mode 100644 index 000000000..334282f05 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream_string.go @@ -0,0 +1,396 @@ +package jsoniter + +import ( + "unicode/utf8" +) + +// htmlSafeSet holds the value true if the ASCII character with the given +// array position can be safely represented inside a JSON string, embedded +// inside of HTML